diff --git a/CHANGELOG.md b/CHANGELOG.md index e8274d27..fc1b9c15 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -9,12 +9,12 @@ ## Changelog +### **HEAD -> main** 2022/02/07 mandic00@live.com + + ### **2.6.2** 2022/02/07 mandic00@live.com -### **origin/main** 2022/01/20 mandic00@live.com - - ### **release: 2.6.1** 2022/01/20 mandic00@live.com @@ -143,7 +143,6 @@ - minor blazepose optimizations - compress samples -- remove handdetect from default package - remove posenet from default package - enhanced movenet postprocessing - use transferrable buffer for worker messages @@ -232,8 +231,6 @@ - implement event emitters - fix iife loader - simplify dependencies -- fix file permissions -- remove old build server - change build process - add benchmark info - simplify canvas handling in nodejs @@ -276,7 +273,6 @@ ### **2.1.1** 2021/07/29 mandic00@live.com -- proposal #141 - add note on manually disping tensor - modularize model loading @@ -355,8 +351,6 @@ ### **1.9.1** 2021/05/21 mandic00@live.com - caching improvements -- sanitize server input -- remove nanodet weights from default distribution - add experimental mb3-centernet object detection - individual model skipframes values still max high threshold for caching - config.videooptimized has been removed and config.cachesensitivity has been added instead @@ -524,7 +518,6 @@ - add experimental nanodet object detection - full models signature -- cleanup ### **1.1.7** 2021/03/16 mandic00@live.com @@ -570,7 +563,6 @@ ### **1.0.3** 2021/03/10 mandic00@live.com - strong typing for public classes and hide private classes -- re-added blazeface-front - enhanced age, gender, emotion detection - full rebuild @@ -596,7 +588,6 @@ - 0.40.5 - fix human.draw - 0.40.4 -- cleanup blazepose code - fix demo - 0.40.3 - 0.40.2 @@ -619,10 +610,7 @@ - 0.20.11 - 0.20.10 - 0.20.9 -- remove extra items -- simmilarity fix - 0.20.8 -- embedding fix - 0.20.7 - build fix - 0.20.6 @@ -656,7 +644,6 @@ ### **0.9.26** 2021/01/18 mandic00@live.com - fix face detection when mesh is disabled -- added minification notes - version bump ### **0.9.25** 2021/01/13 mandic00@live.com @@ -718,7 +705,6 @@ - conditional hand rotation - staggered skipframes -- fix permissions ### **0.9.13** 2020/12/08 mandic00@live.com @@ -827,7 +813,6 @@ - optimized model loader - merge branch 'main' of https://github.com/vladmandic/human into main - created wiki -- delete bug_report.md - optimize font resizing - fix nms sync call @@ -851,7 +836,6 @@ - optimized camera and mobile layout - fixed worker and filter compatibility -- removed test code ### **0.7.2** 2020/11/04 mandic00@live.com @@ -928,7 +912,6 @@ ### **0.4.8** 2020/10/28 mandic00@live.com - revert "updated menu handler" -- fix webpack compatibility issue ### **0.4.7** 2020/10/27 mandic00@live.com @@ -1016,7 +999,6 @@ ### **0.2.8** 2020/10/13 mandic00@live.com -- added example image ### **0.2.7** 2020/10/13 mandic00@live.com @@ -1032,7 +1014,6 @@ ### **0.2.4** 2020/10/12 mandic00@live.com -- removed extra files ### **0.2.3** 2020/10/12 mandic00@live.com diff --git a/demo/faceid/index.js b/demo/faceid/index.js index 9184b9bb..c29a04e1 100644 --- a/demo/faceid/index.js +++ b/demo/faceid/index.js @@ -4,329 +4,8 @@ author: ' */ -// demo/faceid/index.ts -import { Human } from "../../dist/human.esm.js"; - -// demo/faceid/indexdb.ts -var db; -var database = "human"; -var table = "person"; -var log = (...msg) => console.log("indexdb", ...msg); -async function open() { - if (db) - return true; - return new Promise((resolve) => { - const request = 
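/*
  Every helper in this indexdb module follows the same pattern: wrap a single
  IndexedDB request in a Promise so callers can simply await it. The same
  pattern in isolation (a minimal sketch; database/store names illustrative):

    function openDatabase(name, store) {
      return new Promise((resolve, reject) => {
        const req = indexedDB.open(name, 1); // version 1 fires onupgradeneeded on first open
        req.onupgradeneeded = () => req.result.createObjectStore(store, { keyPath: 'id', autoIncrement: true });
        req.onsuccess = () => resolve(req.result); // resolves with an open IDBDatabase
        req.onerror = () => reject(req.error);
      });
    }
*/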
indexedDB.open(database, 1); - request.onerror = (evt) => log("error:", evt); - request.onupgradeneeded = (evt) => { - log("create:", evt.target); - db = evt.target.result; - db.createObjectStore(table, { keyPath: "id", autoIncrement: true }); - }; - request.onsuccess = (evt) => { - db = evt.target.result; - log("open:", db); - resolve(true); - }; - }); -} -async function load() { - const faceDB = []; - if (!db) - await open(); - return new Promise((resolve) => { - const cursor = db.transaction([table], "readwrite").objectStore(table).openCursor(null, "next"); - cursor.onerror = (evt) => log("load error:", evt); - cursor.onsuccess = (evt) => { - if (evt.target.result) { - faceDB.push(evt.target.result.value); - evt.target.result.continue(); - } else { - resolve(faceDB); - } - }; - }); -} -async function count() { - if (!db) - await open(); - return new Promise((resolve) => { - const store = db.transaction([table], "readwrite").objectStore(table).count(); - store.onerror = (evt) => log("count error:", evt); - store.onsuccess = () => resolve(store.result); - }); -} -async function save(faceRecord) { - if (!db) - await open(); - const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image }; - db.transaction([table], "readwrite").objectStore(table).put(newRecord); - log("save:", newRecord); -} -async function remove(faceRecord) { - if (!db) - await open(); - db.transaction([table], "readwrite").objectStore(table).delete(faceRecord.id); - log("delete:", faceRecord); -} - -// demo/faceid/index.ts -var humanConfig = { - modelBasePath: "../../models", - filter: { equalization: true }, - face: { - enabled: true, - detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, - description: { enabled: true }, - mobilefacenet: { enabled: false, modelPath: "https://vladmandic.github.io/human-models/models/mobilefacenet.json" }, - iris: { enabled: true }, - emotion: { enabled: false }, - antispoof: { enabled: true }, - liveness: { enabled: true } - }, - body: { enabled: false }, - hand: { enabled: false }, - object: { enabled: false }, - gesture: { enabled: true } -}; -var matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; -var options = { - minConfidence: 0.6, - minSize: 224, - maxTime: 1e4, - blinkMin: 10, - blinkMax: 800, - threshold: 0.5, - mask: humanConfig.face.detector.mask, - rotation: humanConfig.face.detector.rotation, - cropFactor: humanConfig.face.detector.cropFactor, - ...matchOptions -}; -var ok = { - faceCount: false, - faceConfidence: false, - facingCenter: false, - lookingCenter: false, - blinkDetected: false, - faceSize: false, - antispoofCheck: false, - livenessCheck: false, - elapsedMs: 0 -}; -var allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck; -var current = { face: null, record: null }; -var blink = { - start: 0, - end: 0, - time: 0 -}; -var human = new Human(humanConfig); -human.env["perfadd"] = false; -human.draw.options.font = 'small-caps 18px "Lato"'; -human.draw.options.lineHeight = 20; -var dom = { - video: document.getElementById("video"), - canvas: document.getElementById("canvas"), - log: document.getElementById("log"), - fps: document.getElementById("fps"), - match: document.getElementById("match"), - name: document.getElementById("name"), - save: document.getElementById("save"), - delete: document.getElementById("delete"), - retry: document.getElementById("retry"), - source: 
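/*
  The `ok` object above is the gate for the whole capture flow: every boolean
  check must pass before a face is accepted, and allOk() spells the conjunction
  out by hand. A generic equivalent that skips the numeric elapsedMs counter
  (a sketch, not part of the demo):

    const allChecksPass = (checks) => Object.values(checks)
      .filter((v) => typeof v === 'boolean') // elapsedMs is a number, not a check
      .every((v) => v);
*/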
document.getElementById("source"), - ok: document.getElementById("ok") -}; -var timestamp = { detect: 0, draw: 0 }; -var fps = { detect: 0, draw: 0 }; -var startTime = 0; -var log2 = (...msg) => { - dom.log.innerText += msg.join(" ") + "\n"; - console.log(...msg); -}; -var printFPS = (msg) => dom.fps.innerText = msg; -async function webCam() { - printFPS("starting webcam..."); - const cameraOptions = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } }; - const stream = await navigator.mediaDevices.getUserMedia(cameraOptions); - const ready = new Promise((resolve) => { - dom.video.onloadeddata = () => resolve(true); - }); - dom.video.srcObject = stream; - dom.video.play(); - await ready; - dom.canvas.width = dom.video.videoWidth; - dom.canvas.height = dom.video.videoHeight; - if (human.env.initial) - log2("video:", dom.video.videoWidth, dom.video.videoHeight, "|", stream.getVideoTracks()[0].label); - dom.canvas.onclick = () => { - if (dom.video.paused) - dom.video.play(); - else - dom.video.pause(); - }; -} -async function detectionLoop() { - if (!dom.video.paused) { - if (current.face && current.face.tensor) - human.tf.dispose(current.face.tensor); - await human.detect(dom.video); - const now = human.now(); - fps.detect = 1e3 / (now - timestamp.detect); - timestamp.detect = now; - requestAnimationFrame(detectionLoop); - } -} -async function validationLoop() { - const interpolated = await human.next(human.result); - await human.draw.canvas(dom.video, dom.canvas); - await human.draw.all(dom.canvas, interpolated); - const now = human.now(); - fps.draw = 1e3 / (now - timestamp.draw); - timestamp.draw = now; - printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`); - ok.faceCount = human.result.face.length === 1; - if (ok.faceCount) { - const gestures = Object.values(human.result.gesture).map((gesture) => gesture.gesture); - if (gestures.includes("blink left eye") || gestures.includes("blink right eye")) - blink.start = human.now(); - if (blink.start > 0 && !gestures.includes("blink left eye") && !gestures.includes("blink right eye")) - blink.end = human.now(); - ok.blinkDetected = ok.blinkDetected || Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax; - if (ok.blinkDetected && blink.time === 0) - blink.time = Math.trunc(blink.end - blink.start); - ok.facingCenter = gestures.includes("facing center"); - ok.lookingCenter = gestures.includes("looking center"); - ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence; - ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence; - ok.livenessCheck = (human.result.face[0].live || 0) > options.minConfidence; - ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize; - } - let y = 32; - for (const [key, val] of Object.entries(ok)) { - let el = document.getElementById(`ok-${key}`); - if (!el) { - el = document.createElement("div"); - el.innerText = key; - el.className = "ok"; - el.style.top = `${y}px`; - dom.ok.appendChild(el); - } - if (typeof val === "boolean") - el.style.backgroundColor = val ? 
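/*
  The blink test above is a liveness heuristic: blink.start is stamped when a
  "blink left/right eye" gesture first appears, blink.end when it disappears,
  and the blink only counts if its duration lands between blinkMin and blinkMax
  (10..800 ms in this demo). The core check, isolated:

    const duration = Math.abs(blink.end - blink.start);
    const blinkValid = duration > options.blinkMin && duration < options.blinkMax; // rejects noise and frozen frames
*/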
"lightgreen" : "lightcoral"; - else - el.innerText = `${key}:${val}`; - y += 28; - } - if (allOk()) { - dom.video.pause(); - return human.result.face[0]; - } - if (ok.elapsedMs > options.maxTime) { - dom.video.pause(); - return human.result.face[0]; - } else { - ok.elapsedMs = Math.trunc(human.now() - startTime); - return new Promise((resolve) => { - setTimeout(async () => { - const res = await validationLoop(); - if (res) - resolve(human.result.face[0]); - }, 30); - }); - } -} -async function saveRecords() { - var _a, _b; - if (dom.name.value.length > 0) { - const image = (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, dom.canvas.width, dom.canvas.height); - const rec = { id: 0, name: dom.name.value, descriptor: (_b = current.face) == null ? void 0 : _b.embedding, image }; - await save(rec); - log2("saved face record:", rec.name); - } else { - log2("invalid name"); - } -} -async function deleteRecord() { - if (current.record && current.record.id > 0) { - await remove(current.record); - } -} -async function detectFace() { - var _a, _b; - (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.clearRect(0, 0, options.minSize, options.minSize); - if (!current.face || !current.face.tensor || !current.face.embedding) - return false; - console.log("face record:", current.face); - human.tf.browser.toPixels(current.face.tensor, dom.canvas); - if (await count() === 0) { - log2("face database is empty"); - document.body.style.background = "black"; - dom.delete.style.display = "none"; - return false; - } - const db2 = await load(); - const descriptors = db2.map((rec) => rec.descriptor); - const res = await human.match(current.face.embedding, descriptors, matchOptions); - current.record = db2[res.index] || null; - if (current.record) { - log2(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1e3 * res.similarity) / 10}%`); - dom.name.value = current.record.name; - dom.source.style.display = ""; - (_b = dom.source.getContext("2d")) == null ? void 0 : _b.putImageData(current.record.image, 0, 0); - } - document.body.style.background = res.similarity > options.threshold ? "darkgreen" : "maroon"; - return res.similarity > options.threshold; -} -async function main() { - var _a, _b, _c, _d; - ok.faceCount = false; - ok.faceConfidence = false; - ok.facingCenter = false; - ok.blinkDetected = false; - ok.faceSize = false; - ok.antispoofCheck = false; - ok.livenessCheck = false; - ok.elapsedMs = 0; - dom.match.style.display = "none"; - dom.retry.style.display = "none"; - dom.source.style.display = "none"; - document.body.style.background = "black"; - await webCam(); - await detectionLoop(); - startTime = human.now(); - current.face = await validationLoop(); - dom.canvas.width = ((_b = (_a = current.face) == null ? void 0 : _a.tensor) == null ? void 0 : _b.shape[1]) || options.minSize; - dom.canvas.height = ((_d = (_c = current.face) == null ? void 0 : _c.tensor) == null ? 
void 0 : _d.shape[0]) || options.minSize; - dom.source.width = dom.canvas.width; - dom.source.height = dom.canvas.height; - dom.canvas.style.width = ""; - dom.match.style.display = "flex"; - dom.save.style.display = "flex"; - dom.delete.style.display = "flex"; - dom.retry.style.display = "block"; - if (!allOk()) { - log2("did not find valid face"); - return false; - } else { - return detectFace(); - } -} -async function init() { - log2("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]); - log2("options:", JSON.stringify(options).replace(/{|}|"|\[|\]/g, "").replace(/,/g, " ")); - printFPS("loading..."); - log2("known face records:", await count()); - await webCam(); - await human.load(); - printFPS("initializing..."); - dom.retry.addEventListener("click", main); - dom.save.addEventListener("click", saveRecords); - dom.delete.addEventListener("click", deleteRecord); - await human.warmup(); - await main(); -} -window.onload = init; +import{Human as H}from"../../dist/human.esm.js";var d,R="human",m="person",g=(...t)=>console.log("indexdb",...t);async function b(){return d?!0:new Promise(t=>{let i=indexedDB.open(R,1);i.onerror=s=>g("error:",s),i.onupgradeneeded=s=>{g("create:",s.target),d=s.target.result,d.createObjectStore(m,{keyPath:"id",autoIncrement:!0})},i.onsuccess=s=>{d=s.target.result,g("open:",d),t(!0)}})}async function C(){let t=[];return d||await b(),new Promise(i=>{let s=d.transaction([m],"readwrite").objectStore(m).openCursor(null,"next");s.onerror=o=>g("load error:",o),s.onsuccess=o=>{o.target.result?(t.push(o.target.result.value),o.target.result.continue()):i(t)}})}async function k(){return d||await b(),new Promise(t=>{let i=d.transaction([m],"readwrite").objectStore(m).count();i.onerror=s=>g("count error:",s),i.onsuccess=()=>t(i.result)})}async function x(t){d||await b();let i={name:t.name,descriptor:t.descriptor,image:t.image};d.transaction([m],"readwrite").objectStore(m).put(i),g("save:",i)}async function D(t){d||await b(),d.transaction([m],"readwrite").objectStore(m).delete(t.id),g("delete:",t)}var v={modelBasePath:"../../models",filter:{equalization:!0},face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},mobilefacenet:{enabled:!1,modelPath:"https://vladmandic.github.io/human-models/models/mobilefacenet.json"},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},I={order:2,multiplier:25,min:.2,max:.8},c={minConfidence:.6,minSize:224,maxTime:1e4,blinkMin:10,blinkMax:800,threshold:.5,mask:v.face.detector.mask,rotation:v.face.detector.rotation,cropFactor:v.face.detector.cropFactor,...I},n={faceCount:!1,faceConfidence:!1,facingCenter:!1,lookingCenter:!1,blinkDetected:!1,faceSize:!1,antispoofCheck:!1,livenessCheck:!1,elapsedMs:0},M=()=>n.faceCount&&n.faceSize&&n.blinkDetected&&n.facingCenter&&n.lookingCenter&&n.faceConfidence&&n.antispoofCheck&&n.livenessCheck,r={face:null,record:null},l={start:0,end:0,time:0},a=new H(v);a.env.perfadd=!1;a.draw.options.font='small-caps 18px "Lato"';a.draw.options.lineHeight=20;var 
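/*
  This single added line is the same demo/faceid/index.ts compiled and
  minified, so every identifier above maps to a short name (Human -> H,
  dom -> e, options -> c, ok -> n, human -> a). A bundle of this shape can be
  produced with esbuild's JS API (a sketch under assumed settings; the actual
  human build configuration may differ):

    import esbuild from 'esbuild';
    await esbuild.build({
      entryPoints: ['demo/faceid/index.ts'],
      outfile: 'demo/faceid/index.js',
      format: 'esm',
      bundle: true,
      external: ['../../dist/human.esm.js'], // keep the library import intact
      minify: true,
    });
*/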
e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},h={detect:0,draw:0},y={detect:0,draw:0},E=0,p=(...t)=>{e.log.innerText+=t.join(" ")+`
`,console.log(...t)},w=t=>e.fps.innerText=t;async function S(){w("starting webcam...");let t={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},i=await navigator.mediaDevices.getUserMedia(t),s=new Promise(o=>{e.video.onloadeddata=()=>o(!0)});e.video.srcObject=i,e.video.play(),await s,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight,a.env.initial&&p("video:",e.video.videoWidth,e.video.videoHeight,"|",i.getVideoTracks()[0].label),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function T(){if(!e.video.paused){r.face&&r.face.tensor&&a.tf.dispose(r.face.tensor),await a.detect(e.video);let t=a.now();y.detect=1e3/(t-h.detect),h.detect=t,requestAnimationFrame(T)}}async function L(){let t=await a.next(a.result);await a.draw.canvas(e.video,e.canvas),await a.draw.all(e.canvas,t);let i=a.now();if(y.draw=1e3/(i-h.draw),h.draw=i,w(`fps: ${y.detect.toFixed(1).padStart(5," ")} detect | ${y.draw.toFixed(1).padStart(5," ")} draw`),n.faceCount=a.result.face.length===1,n.faceCount){let o=Object.values(a.result.gesture).map(f=>f.gesture);(o.includes("blink left eye")||o.includes("blink right eye"))&&(l.start=a.now()),l.start>0&&!o.includes("blink left eye")&&!o.includes("blink right eye")&&(l.end=a.now()),n.blinkDetected=n.blinkDetected||Math.abs(l.end-l.start)>c.blinkMin&&Math.abs(l.end-l.start)<c.blinkMax,n.blinkDetected&&l.time===0&&(l.time=Math.trunc(l.end-l.start)),n.facingCenter=o.includes("facing center"),n.lookingCenter=o.includes("looking center"),n.faceConfidence=(a.result.face[0].boxScore||0)>c.minConfidence&&(a.result.face[0].faceScore||0)>c.minConfidence&&(a.result.face[0].genderScore||0)>c.minConfidence,n.antispoofCheck=(a.result.face[0].real||0)>c.minConfidence,n.livenessCheck=(a.result.face[0].live||0)>c.minConfidence,n.faceSize=a.result.face[0].box[2]>=c.minSize&&a.result.face[0].box[3]>=c.minSize}let s=32;for(let[o,f]of Object.entries(n)){let u=document.getElementById(`ok-${o}`);u||(u=document.createElement("div"),u.innerText=o,u.className="ok",u.style.top=`${s}px`,e.ok.appendChild(u)),typeof f=="boolean"?u.style.backgroundColor=f?"lightgreen":"lightcoral":u.innerText=`${o}:${f}`,s+=28}return M()||n.elapsedMs>c.maxTime?(e.video.pause(),a.result.face[0]):(n.elapsedMs=Math.trunc(a.now()-E),new Promise(o=>{setTimeout(async()=>{await L()&&o(a.result.face[0])},30)}))}async function P(){var t,i;if(e.name.value.length>0){let s=(t=e.canvas.getContext("2d"))==null?void 0:t.getImageData(0,0,e.canvas.width,e.canvas.height),o={id:0,name:e.name.value,descriptor:(i=r.face)==null?void 0:i.embedding,image:s};await x(o),p("saved face record:",o.name)}else p("invalid name")}async function z(){r.record&&r.record.id>0&&await D(r.record)}async function j(){var o,f;if((o=e.canvas.getContext("2d"))==null||o.clearRect(0,0,c.minSize,c.minSize),!r.face||!r.face.tensor||!r.face.embedding)return!1;if(console.log("face record:",r.face),a.tf.browser.toPixels(r.face.tensor,e.canvas),await k()===0)return p("face database is empty"),document.body.style.background="black",e.delete.style.display="none",!1;let t=await C(),i=t.map(u=>u.descriptor),s=await a.match(r.face.embedding,i,I);return r.record=t[s.index]||null,r.record&&(p(`best match: 
${r.record.name} | id: ${r.record.id} | similarity: ${Math.round(1e3*s.similarity)/10}%`),e.name.value=r.record.name,e.source.style.display="",(f=e.source.getContext("2d"))==null||f.putImageData(r.record.image,0,0)),document.body.style.background=s.similarity>c.threshold?"darkgreen":"maroon",s.similarity>c.threshold}async function B(){var t,i,s,o;return n.faceCount=!1,n.faceConfidence=!1,n.facingCenter=!1,n.blinkDetected=!1,n.faceSize=!1,n.antispoofCheck=!1,n.livenessCheck=!1,n.elapsedMs=0,e.match.style.display="none",e.retry.style.display="none",e.source.style.display="none",document.body.style.background="black",await S(),await T(),E=a.now(),r.face=await L(),e.canvas.width=((i=(t=r.face)==null?void 0:t.tensor)==null?void 0:i.shape[1])||c.minSize,e.canvas.height=((o=(s=r.face)==null?void 0:s.tensor)==null?void 0:o.shape[0])||c.minSize,e.source.width=e.canvas.width,e.source.height=e.canvas.height,e.canvas.style.width="",e.match.style.display="flex",e.save.style.display="flex",e.delete.style.display="flex",e.retry.style.display="block",M()?j():(p("did not find valid face"),!1)}async function q(){p("human version:",a.version,"| tfjs version:",a.tf.version["tfjs-core"]),p("options:",JSON.stringify(c).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),w("loading..."),p("known face records:",await k()),await S(),await a.load(),w("initializing..."),e.retry.addEventListener("click",B),e.save.addEventListener("click",P),e.delete.addEventListener("click",z),await a.warmup(),await B()}window.onload=q; /** * Human demo for browsers * @default Human Library diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 5f1bb67c..81c8002a 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -4,100 +4,8 @@ author: ' */ -// demo/typescript/index.ts -import { Human } from "../../dist/human.esm.js"; -var humanConfig = { - modelBasePath: "../../models", - filter: { enabled: true, equalization: false }, - face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } }, - body: { enabled: true }, - hand: { enabled: true }, - object: { enabled: false }, - gesture: { enabled: true } -}; -var human = new Human(humanConfig); -human.env["perfadd"] = false; -human.draw.options.font = 'small-caps 18px "Lato"'; -human.draw.options.lineHeight = 20; -var dom = { - video: document.getElementById("video"), - canvas: document.getElementById("canvas"), - log: document.getElementById("log"), - fps: document.getElementById("status"), - perf: document.getElementById("performance") -}; -var timestamp = { detect: 0, draw: 0, tensors: 0 }; -var fps = { detect: 0, draw: 0 }; -var log = (...msg) => { - dom.log.innerText += msg.join(" ") + "\n"; - console.log(...msg); -}; -var status = (msg) => dom.fps.innerText = msg; -var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | "); -async function webCam() { - status("starting webcam..."); - const options = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } }; - const stream = await navigator.mediaDevices.getUserMedia(options); - const ready = new Promise((resolve) => { - dom.video.onloadeddata = () => resolve(true); - }); - dom.video.srcObject = stream; - dom.video.play(); - await ready; - dom.canvas.width = dom.video.videoWidth; - dom.canvas.height = dom.video.videoHeight; - const track = 
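/*
  webCam() requests a user-facing camera sized to the page; the track metadata
  getters that follow are feature-detected because not every browser implements
  them all. The same probe standalone (sketch):

    const stream = await navigator.mediaDevices.getUserMedia({ audio: false, video: { facingMode: 'user' } });
    const [track] = stream.getVideoTracks();
    const settings = track.getSettings ? track.getSettings() : {};             // actual resolution/fps granted
    const capabilities = track.getCapabilities ? track.getCapabilities() : {}; // ranges the hardware supports
    console.log(track.label, settings, capabilities);
*/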
stream.getVideoTracks()[0]; - const capabilities = track.getCapabilities ? track.getCapabilities() : ""; - const settings = track.getSettings ? track.getSettings() : ""; - const constraints = track.getConstraints ? track.getConstraints() : ""; - log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities }); - dom.canvas.onclick = () => { - if (dom.video.paused) - dom.video.play(); - else - dom.video.pause(); - }; -} -async function detectionLoop() { - if (!dom.video.paused) { - await human.detect(dom.video); - const tensors = human.tf.memory().numTensors; - if (tensors - timestamp.tensors !== 0) - log("allocated tensors:", tensors - timestamp.tensors); - timestamp.tensors = tensors; - } - const now = human.now(); - fps.detect = 1e3 / (now - timestamp.detect); - timestamp.detect = now; - requestAnimationFrame(detectionLoop); -} -async function drawLoop() { - if (!dom.video.paused) { - const interpolated = await human.next(human.result); - await human.draw.canvas(dom.video, dom.canvas); - await human.draw.all(dom.canvas, interpolated); - perf(interpolated.performance); - } - const now = human.now(); - fps.draw = 1e3 / (now - timestamp.draw); - timestamp.draw = now; - status(dom.video.paused ? "paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`); - setTimeout(drawLoop, 30); -} -async function main() { - log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]); - log("platform:", human.env.platform, "| agent:", human.env.agent); - status("loading..."); - await human.load(); - log("backend:", human.tf.getBackend(), "| available:", human.env.backends); - log("loaded models:", Object.values(human.models).filter((model) => model !== null).length); - status("initializing..."); - await human.warmup(); - await webCam(); - await detectionLoop(); - await drawLoop(); -} -window.onload = main; +import{Human as p}from"../../dist/human.esm.js";var w={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},i={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+` +`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let 
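/*
  The demo above runs two decoupled loops: detectionLoop() re-runs inference as
  fast as requestAnimationFrame allows, while drawLoop() repaints interpolated
  results on a fixed ~30 ms timer, and each detect pass diffs the tfjs tensor
  count as a simple leak check. That check in isolation (sketch):

    let last = 0;
    const tensors = human.tf.memory().numTensors; // global tfjs tensor count
    if (tensors !== last) console.log('allocated tensors:', tensors - last);
    last = tensors;
*/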
o=n.getVideoTracks()[0],v=o.getCapabilities?o.getCapabilities():"",g=o.getSettings?o.getSettings():"",u=o.getConstraints?o.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,o.label,{stream:n,track:o,settings:g,constraints:u,capabilities:v}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-i.tensors!==0&&s("allocated tensors:",n-i.tensors),i.tensors=n}let a=t.now();d.detect=1e3/(a-i.detect),i.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-i.draw),i.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;
/**
 * Human demo for browsers
 * @default Human Library
diff --git a/dist/human.d.ts b/dist/human.d.ts
new file mode 100644
index 00000000..fca0ad10
--- /dev/null
+++ b/dist/human.d.ts
@@ -0,0 +1,2520 @@
+/// 
+
+/** meta-function that performs draw for: canvas, face, body, hand */
+declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>;
+
+/** Defines all possible canvas types */
+export declare type AnyCanvas = HTMLCanvasElement | OffscreenCanvas;
+
+/** Defines all possible image types */
+export declare type AnyImage = HTMLImageElement | typeof Image;
+
+/** Defines all possible video types */
+export declare type AnyVideo = HTMLMediaElement | HTMLVideoElement;
+
+/** @docalias number[] */
+declare interface ArrayMap {
+ R0: number;
+ R1: number[];
+ R2: number[][];
+ R3: number[][][];
+ R4: number[][][][];
+ R5: number[][][][][];
+ R6: number[][][][][][];
+}
+
+/** Possible TensorFlow backends */
+export declare type BackendType = ['cpu', 'wasm', 'webgl', 'humangl', 'tensorflow', 'webgpu'];
+
+/** draw detected bodies */
+declare function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+export declare type BodyAnnotation = BodyAnnotationBlazePose | BodyAnnotationEfficientPose;
+
+export declare type BodyAnnotationBlazePose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'leftEye' | 'rightEye' | 'mouth';
+
+export declare type BodyAnnotationEfficientPose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'head';
+
+/** Configures all body detection specific options */
+export declare interface BodyConfig extends GenericConfig {
+ /** maximum number of detected bodies */
+ maxDetected: number;
+ /** minimum confidence for a detected body before results are discarded */
+ minConfidence: number;
+}
+
+/** body gesture type */
+export declare type BodyGesture = `leaning ${'left' | 'right'}` | `raise ${'left' | 'right'} hand` | 'i give up';
+
+/** Body Result keypoints */
+export declare interface BodyKeypoint {
+ /** body part name */
+ part: BodyLandmark;
+ /** body part position */
+ position: Point;
+ /** body part position normalized to 0..1 */
+ positionRaw: Point;
+ /** body part position 
relative to body center in meters */
+ distance?: Point;
+ /** body part detection score */
+ score: number;
+}
+
+export declare type BodyLandmark = BodyLandmarkPoseNet | BodyLandmarkMoveNet | BodyLandmarkEfficientNet | BodyLandmarkBlazePose;
+
+export declare type BodyLandmarkBlazePose = 'nose' | 'leftEyeInside' | 'leftEye' | 'leftEyeOutside' | 'rightEyeInside' | 'rightEye' | 'rightEyeOutside' | 'leftEar' | 'rightEar' | 'leftMouth' | 'rightMouth' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftPinky' | 'rightPinky' | 'leftIndex' | 'rightIndex' | 'leftThumb' | 'rightThumb' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle' | 'leftHeel' | 'rightHeel' | 'leftFoot' | 'rightFoot' | 'bodyCenter' | 'bodyTop' | 'leftPalm' | 'leftHand' | 'rightPalm' | 'rightHand';
+
+export declare type BodyLandmarkEfficientNet = 'head' | 'neck' | 'rightShoulder' | 'rightElbow' | 'rightWrist' | 'chest' | 'leftShoulder' | 'leftElbow' | 'leftWrist' | 'bodyCenter' | 'rightHip' | 'rightKnee' | 'rightAnkle' | 'leftHip' | 'leftKnee' | 'leftAnkle';
+
+export declare type BodyLandmarkMoveNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle';
+
+export declare type BodyLandmarkPoseNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle';
+
+/** Body results */
+export declare interface BodyResult {
+ /** body id */
+ id: number;
+ /** body detection score */
+ score: number;
+ /** detected body box */
+ box: Box;
+ /** detected body box normalized to 0..1 */
+ boxRaw: Box;
+ /** detected body keypoints */
+ keypoints: Array<BodyKeypoint>;
+ /** detected body keypoints combined into annotated parts */
+ annotations: Record<BodyAnnotation, Point[]>;
+}
+
+/** generic box as [x, y, width, height] */
+export declare type Box = [number, number, number, number];
+
+/**
+ * Creates an IOHandler that loads model artifacts from user-selected files.
+ *
+ * This method can be used for loading from files such as user-selected files
+ * in the browser.
+ * When used in conjunction with `tf.loadLayersModel`, an instance of
+ * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.
+ *
+ * ```js
+ * // Note: This code snippet won't run properly without the actual file input
+ * // elements in the HTML DOM.
+ *
+ * // Suppose there are two HTML file input (`<input type="file">`)
+ * // elements.
+ * const uploadJSONInput = document.getElementById('upload-json');
+ * const uploadWeightsInput = document.getElementById('upload-weights');
+ * const model = await tf.loadLayersModel(tf.io.browserFiles(
+ * [uploadJSONInput.files[0], uploadWeightsInput.files[0]]));
+ * ```
+ *
+ * @param files `File`s to load from. Currently, this function supports only
+ * loading from files that contain Keras-style models (i.e., `tf.Model`s), for
+ * which an `Array` of `File`s is expected (in that order):
+ * - A JSON file containing the model topology and weight manifest.
+ * - Optionally, one or more binary files containing the binary weights.
+ * These files must have names that match the paths in the `weightsManifest`
+ * contained by the aforementioned JSON file, or errors will be thrown
+ * during loading. 
These weights files have the same format as the ones
+ * generated by `tensorflowjs_converter` that comes with the `tensorflowjs`
+ * Python PIP package. If no weights files are provided, only the model
+ * topology will be loaded from the JSON file above.
+ * @returns An instance of `Files` `IOHandler`.
+ *
+ * @doc {
+ * heading: 'Models',
+ * subheading: 'Loading',
+ * namespace: 'io',
+ * ignoreCI: true
+ * }
+ */
+declare function browserFiles(files: File[]): IOHandler;
+
+/**
+ * Deprecated. Use `tf.io.http`.
+ * @param path
+ * @param loadOptions
+ */
+declare function browserHTTPRequest(path: string, loadOptions?: LoadOptions): IOHandler;
+
+/** draw processed canvas */
+declare function canvas(input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas): Promise<void>;
+
+/**
+ * Concatenate a number of ArrayBuffers into one.
+ *
+ * @param buffers A number of array buffers to concatenate.
+ * @returns Result of concatenating `buffers` in order.
+ */
+declare function concatenateArrayBuffers(buffers: ArrayBuffer[]): ArrayBuffer;
+
+/**
+ * Configuration interface definition for **Human** library
+ * Contains all configurable parameters
+ * Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
+ */
+export declare interface Config {
+ /** Backend used for TFJS operations
+ * valid built-in backends are:
+ * - Browser: `cpu`, `wasm`, `webgl`, `humangl`, `webgpu`
+ * - NodeJS: `cpu`, `wasm`, `tensorflow`
+ * default: `humangl` for browser and `tensorflow` for nodejs
+ */
+ backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu';
+ /** Path to *.wasm files if backend is set to `wasm`
+ *
+ * default: auto-detects to link to CDN `jsdelivr` when running in browser
+ */
+ wasmPath: string;
+ /** Print debug statements to console
+ *
+ * default: `true`
+ */
+ debug: boolean;
+ /** Perform model loading and inference concurrently or sequentially
+ *
+ * default: `true`
+ */
+ async: boolean;
+ /** What to use for `human.warmup()`
+ * - warmup pre-initializes all models for faster inference but can take significant time on startup
+ * - used by `webgl`, `humangl` and `webgpu` backends
+ *
+ * default: `full`
+ */
+ warmup: '' | 'none' | 'face' | 'full' | 'body';
+ /** Base model path (typically starting with file://, http:// or https://) for all models
+ * - individual modelPath values are relative to this path
+ *
+ * default: `../models/` for browsers and `file://models/` for nodejs
+ */
+ modelBasePath: string;
+ /** Cache models in IndexDB on first successful load
+ * default: true if indexdb is available (browsers), false if it's not (nodejs)
+ */
+ cacheModels: boolean;
+ /** Cache sensitivity
+ * - values 0..1 where 0.01 means reset cache if input changed more than 1%
+ * - set to 0 to disable caching
+ *
+ * default: 0.7
+ */
+ cacheSensitivity: number;
+ /** Perform immediate garbage collection on deallocated tensors instead of caching them */
+ deallocate: boolean;
+ /** Internal Variable */
+ skipAllowed: boolean;
+ /** Filter config {@link FilterConfig} */
+ filter: Partial<FilterConfig>;
+ /** Gesture config {@link GestureConfig} */
+ gesture: Partial<GestureConfig>;
+ /** Face config {@link FaceConfig} */
+ face: Partial<FaceConfig>;
+ /** Body config {@link BodyConfig} */
+ body: Partial<BodyConfig>;
+ /** Hand config {@link HandConfig} */
+ hand: Partial<HandConfig>;
+ /** Object config {@link ObjectConfig} */
+ object: Partial<ObjectConfig>;
+ /** Segmentation config {@link SegmentationConfig} */
+ segmentation: Partial<SegmentationConfig>;
+}
+
+/**
+ * Copy a model from one URL to another. 
+ *
+ * This function supports:
+ *
+ * 1. Copying within a storage medium, e.g.,
+ * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')`
+ * 2. Copying between two storage mediums, e.g.,
+ * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')`
+ *
+ * ```js
+ * // First create and save a model.
+ * const model = tf.sequential();
+ * model.add(tf.layers.dense(
+ * {units: 1, inputShape: [10], activation: 'sigmoid'}));
+ * await model.save('localstorage://demo/management/model1');
+ *
+ * // Then list existing models.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Copy the model, from Local Storage to IndexedDB.
+ * await tf.io.copyModel(
+ * 'localstorage://demo/management/model1',
+ * 'indexeddb://demo/management/model1');
+ *
+ * // List models again.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Remove both models.
+ * await tf.io.removeModel('localstorage://demo/management/model1');
+ * await tf.io.removeModel('indexeddb://demo/management/model1');
+ * ```
+ *
+ * @param sourceURL Source URL of copying.
+ * @param destURL Destination URL of copying.
+ * @returns ModelArtifactsInfo of the copied model (if and only if copying
+ * is successful).
+ * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or
+ * if `oldPath` and `newPath` are identical.
+ *
+ * @doc {
+ * heading: 'Models',
+ * subheading: 'Management',
+ * namespace: 'io',
+ * ignoreCI: true
+ * }
+ */
+declare function copyModel(sourceURL: string, destURL: string): Promise<ModelArtifactsInfo>;
+
+/**
+ * We wrap data id since we use weak map to avoid memory leaks.
+ * Since we have our own memory management, we have a reference counter
+ * mapping a tensor to its data, so there is always a pointer (even if that
+ * data is otherwise garbage collectable).
+ * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/
+ * Global_Objects/WeakMap
+ */
+declare type DataId = object;
+
+declare type DataToGPUOptions = DataToGPUWebGLOption;
+
+declare interface DataToGPUWebGLOption {
+ customTexShape?: [number, number];
+}
+
+/** @docalias 'float32'|'int32'|'bool'|'complex64'|'string' */
+declare type DataType = keyof DataTypeMap;
+
+declare interface DataTypeMap {
+ float32: Float32Array;
+ int32: Int32Array;
+ bool: Uint8Array;
+ complex64: Float32Array;
+ string: string[];
+}
+
+/**
+ * Decode flat ArrayBuffer as weights.
+ *
+ * This function does not handle sharding.
+ *
+ * This function is the reverse of `encodeWeights`.
+ *
+ * @param buffer A flat ArrayBuffer carrying the binary values of the tensors
+ * concatenated in the order specified in `specs`.
+ * @param specs Specifications of the names, dtypes and shapes of the tensors
+ * whose value are encoded by `buffer`.
+ * @return A map from tensor name to tensor value, with the names corresponding
+ * to names in `specs`.
+ * @throws Error, if any of the tensors has unsupported dtype. 
+ */
+declare function decodeWeights(buffer: ArrayBuffer, specs: WeightsManifestEntry[]): NamedTensorMap;
+
+/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */
+export declare const defaults: Config;
+
+/** Face descriptor type as number array */
+export declare type Descriptor = Array<number>;
+
+/** Calculates distance between two descriptors
+ * @param options - calculation options
+ * - order - algorithm to use
+ * Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
+ * - multiplier - by how much to enhance difference analysis in range of 1..100
+ * default is 20, which normalizes results so that similarity above 0.5 can be considered a match
+ */
+declare function distance(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number;
+
+declare namespace draw {
+ export {
+ gesture,
+ face,
+ body,
+ hand,
+ object,
+ person,
+ canvas,
+ all,
+ DrawOptions,
+ options
+ }
+}
+
+/** Draw Options
+ * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter
+ */
+export declare type DrawOptions = {
+ /** draw line color */
+ color: string;
+ /** label color */
+ labelColor: string;
+ /** label shadow color */
+ shadowColor: string;
+ /** label font */
+ font: string;
+ /** line spacing between labels */
+ lineHeight: number;
+ /** line width for drawn lines */
+ lineWidth: number;
+ /** size of drawn points */
+ pointSize: number;
+ /** draw rounded boxes by n pixels */
+ roundRect: number;
+ /** should points be drawn? */
+ drawPoints: boolean;
+ /** should labels be drawn? */
+ drawLabels: boolean;
+ /** should detected gestures be drawn? */
+ drawGestures: boolean;
+ /** should draw boxes around detection results? */
+ drawBoxes: boolean;
+ /** should draw polygons from detection points? */
+ drawPolygons: boolean;
+ /** should draw gaze arrows? */
+ drawGaze: boolean;
+ /** should fill polygons? */
+ fillPolygons: boolean;
+ /** use z-coordinate when available */
+ useDepth: boolean;
+ /** should lines be curved? */
+ useCurves: boolean;
+};
+
+export declare type Emotion = 'angry' | 'disgust' | 'fear' | 'happy' | 'sad' | 'surprise' | 'neutral';
+
+/**
+ * Encode a map from names to weight values as an ArrayBuffer, along with an
+ * `Array` of `WeightsManifestEntry` as specification of the encoded weights.
+ *
+ * This function does not perform sharding.
+ *
+ * This function is the reverse of `decodeWeights`.
+ *
+ * @param tensors A map ("dict") from names to tensors.
+ * @param group Group to which the weights belong (optional).
+ * @returns A `Promise` of
+ * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s
+ * concatenated.
+ * - An `Array` of `WeightManifestEntry`s, carrying information including
+ * tensor names, `dtype`s and shapes.
+ * @throws Error: on unsupported tensor `dtype`. 
+ */
+declare function encodeWeights(tensors: NamedTensorMap | NamedTensor[], group?: WeightGroup): Promise<{
+ data: ArrayBuffer;
+ specs: WeightsManifestEntry[];
+}>;
+
+/** Env class that holds detected capabilities */
+export declare class Env {
+ /** Running in Browser */
+ browser: boolean;
+ /** Running in NodeJS */
+ node: boolean;
+ /** Running in WebWorker thread */
+ worker: boolean;
+ /** Detected platform */
+ platform: string;
+ /** Detected agent */
+ agent: string;
+ /** List of supported backends */
+ backends: string[];
+ /** Has any work been performed so far */
+ initial: boolean;
+ /** Are image filters supported? */
+ filter: boolean | undefined;
+ /** TFJS instance details */
+ tfjs: {
+ version: undefined | string;
+ };
+ /** Is offscreenCanvas supported? */
+ offscreen: undefined | boolean;
+ /** Are performance counter instant values or additive */
+ perfadd: boolean;
+ /** WASM detected capabilities */
+ wasm: {
+ supported: undefined | boolean;
+ backend: undefined | boolean;
+ simd: undefined | boolean;
+ multithread: undefined | boolean;
+ };
+ /** WebGL detected capabilities */
+ webgl: {
+ supported: undefined | boolean;
+ backend: undefined | boolean;
+ version: undefined | string;
+ renderer: undefined | string;
+ };
+ /** WebGPU detected capabilities */
+ webgpu: {
+ supported: undefined | boolean;
+ backend: undefined | boolean;
+ adapter: undefined | string;
+ };
+ /** CPU info */
+ cpu: {
+ model: undefined | string;
+ flags: string[];
+ };
+ /** List of supported kernels for current backend */
+ kernels: string[];
+ /** MonkeyPatch for Canvas */
+ Canvas: undefined;
+ /** MonkeyPatch for Image */
+ Image: undefined;
+ /** MonkeyPatch for ImageData */
+ ImageData: undefined;
+ constructor();
+ /** update backend information */
+ updateBackend(): Promise<void>;
+ /** update cpu information */
+ updateCPU(): Promise<void>;
+}
+
+export declare const env: Env;
+
+/** Events dispatched by `human.events`
+ * - `create`: triggered when Human object is instantiated
+ * - `load`: triggered when models are loaded (explicitly or on-demand)
+ * - `image`: triggered when input image is processed
+ * - `result`: triggered when detection is complete
+ * - `warmup`: triggered when warmup is complete
+ */
+export declare type Events = 'create' | 'load' | 'image' | 'result' | 'warmup' | 'error';
+
+/** Defines possible externally defined canvas */
+export declare type ExternalCanvas = typeof env.Canvas;
+
+/** draw detected faces */
+declare function face(inCanvas: AnyCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Anti-spoofing part of face configuration */
+export declare interface FaceAntiSpoofConfig extends GenericConfig {
+}
+
+/** Configures all face-specific options: face detection, mesh analysis, age, gender, emotion detection and face description */
+export declare interface FaceConfig extends GenericConfig {
+ detector: Partial<FaceDetectorConfig>;
+ mesh: Partial<FaceMeshConfig>;
+ iris: Partial<FaceIrisConfig>;
+ description: Partial<FaceDescriptionConfig>;
+ emotion: Partial<FaceEmotionConfig>;
+ antispoof: Partial<FaceAntiSpoofConfig>;
+ liveness: Partial<FaceLivenessConfig>;
+}
+
+/** Description or face embedding part of face configuration
+ * - also used by age and gender detection
+ */
+export declare interface FaceDescriptionConfig extends GenericConfig {
+ /** minimum confidence for a detected face before results are discarded */
+ minConfidence: number;
+}
+
+/** Detector part of face configuration */
+export declare interface FaceDetectorConfig extends GenericConfig {
+ /** is face rotation correction performed after detecting face? 
+ * used to correctly analyze faces under high angles
+ */
+ rotation: boolean;
+ /** maximum number of detected faces */
+ maxDetected: number;
+ /** minimum confidence for a detected face before results are discarded */
+ minConfidence: number;
+ /** minimum overlap between two detected faces before one is discarded */
+ iouThreshold: number;
+ /** should child models perform on masked image of a face */
+ mask: boolean;
+ /** should face detection return processed and cropped face tensor that can be used with an external model for additional processing?
+ * if enabled it must be manually deallocated to avoid memory leak */
+ return: boolean;
+}
+
+/** Emotion part of face configuration */
+export declare interface FaceEmotionConfig extends GenericConfig {
+ /** minimum confidence for a detected face before results are discarded */
+ minConfidence: number;
+}
+
+/** face gesture type */
+export declare type FaceGesture = `facing ${'left' | 'center' | 'right'}` | `blink ${'left' | 'right'} eye` | `mouth ${number}% open` | `head ${'up' | 'down'}`;
+
+/** Iris part of face configuration */
+export declare interface FaceIrisConfig extends GenericConfig {
+}
+
+export declare type FaceLandmark = 'leftEye' | 'rightEye' | 'nose' | 'mouth' | 'leftEar' | 'rightEar' | 'symmetryLine' | 'silhouette' | 'lipsUpperOuter' | 'lipsLowerOuter' | 'lipsUpperInner' | 'lipsLowerInner' | 'rightEyeUpper0' | 'rightEyeLower0' | 'rightEyeUpper1' | 'rightEyeLower1' | 'rightEyeUpper2' | 'rightEyeLower2' | 'rightEyeLower3' | 'rightEyebrowUpper' | 'rightEyebrowLower' | 'rightEyeIris' | 'leftEyeUpper0' | 'leftEyeLower0' | 'leftEyeUpper1' | 'leftEyeLower1' | 'leftEyeUpper2' | 'leftEyeLower2' | 'leftEyeLower3' | 'leftEyebrowUpper' | 'leftEyebrowLower' | 'leftEyeIris' | 'midwayBetweenEyes' | 'noseTip' | 'noseBottom' | 'noseRightCorner' | 'noseLeftCorner' | 'rightCheek' | 'leftCheek';
+
+/** Liveness part of face configuration */
+export declare interface FaceLivenessConfig extends GenericConfig {
+}
+
+/** Mesh part of face configuration */
+export declare interface FaceMeshConfig extends GenericConfig {
+}
+
+/** Face results
+ * - Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
+ * - Some values may be null if specific model is not enabled
+ */
+export declare interface FaceResult {
+ /** face id */
+ id: number;
+ /** overall face score */
+ score: number;
+ /** detection score */
+ boxScore: number;
+ /** mesh score */
+ faceScore: number;
+ /** detected face box */
+ box: Box;
+ /** detected face box normalized to 0..1 */
+ boxRaw: Box;
+ /** detected face mesh */
+ mesh: Array<Point>;
+ /** detected face mesh normalized to 0..1 */
+ meshRaw: Array<Point>;
+ /** mesh keypoints combined into annotated results */
+ annotations: Record<FaceLandmark, Point[]>;
+ /** detected age */
+ age?: number;
+ /** detected gender */
+ gender?: Gender;
+ /** gender detection score */
+ genderScore?: number;
+ /** detected emotions */
+ emotion?: Array<{
+ score: number;
+ emotion: Emotion;
+ }>;
+ /** detected race */
+ race?: Array<{
+ score: number;
+ race: Race;
+ }>;
+ /** face descriptor */
+ embedding?: Array<number>;
+ /** face iris distance from camera */
+ iris?: number;
+ /** face anti-spoofing result confidence */
+ real?: number;
+ /** face liveness result confidence */
+ live?: number;
+ /** face rotation details */
+ rotation?: {
+ angle: {
+ roll: number;
+ yaw: number;
+ pitch: number;
+ };
+ matrix: [number, number, number, number, number, number, number, number, number];
+ gaze: {
+ bearing: number;
+ strength: number;
+ };
+ 
} | null; + /** detected face as tensor that can be used in further pipelines */ + tensor?: Tensor; +} + +/** Run input through image filters before inference + * - available only in Browser environments + * - image filters run with near-zero latency as they are executed on the GPU using WebGL + */ +export declare interface FilterConfig { + /** are image filters enabled? */ + enabled: boolean; + /** perform image histogram equalization + * - equalization is performed on input as a whole and detected face before its passed for further analysis + */ + equalization: boolean; + /** resize input width + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + width: number; + /** resize input height + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + height: number; + /** return processed canvas imagedata in result */ + return: boolean; + /** flip input as mirror image */ + flip: boolean; + /** range: -1 (darken) to 1 (lighten) */ + brightness: number; + /** range: -1 (reduce contrast) to 1 (increase contrast) */ + contrast: number; + /** range: 0 (no sharpening) to 1 (maximum sharpening) */ + sharpness: number; + /** range: 0 (no blur) to N (blur radius in pixels) */ + blur: number; + /** range: -1 (reduce saturation) to 1 (increase saturation) */ + saturation: number; + /** range: 0 (no change) to 360 (hue rotation in degrees) */ + hue: number; + /** image negative */ + negative: boolean; + /** image sepia colors */ + sepia: boolean; + /** image vintage colors */ + vintage: boolean; + /** image kodachrome colors */ + kodachrome: boolean; + /** image technicolor colors */ + technicolor: boolean; + /** image polaroid camera effect */ + polaroid: boolean; + /** range: 0 (no pixelate) to N (number of pixels to pixelate) */ + pixelate: number; +} + +export declare type Finger = 'index' | 'middle' | 'pinky' | 'ring' | 'thumb' | 'palm'; + +export declare type FingerCurl = 'none' | 'half' | 'full'; + +export declare type FingerDirection = 'verticalUp' | 'verticalDown' | 'horizontalLeft' | 'horizontalRight' | 'diagonalUpRight' | 'diagonalUpLeft' | 'diagonalDownRight' | 'diagonalDownLeft'; + +/** + * Creates an IOHandler that loads model artifacts from memory. + * + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * const model = await tf.loadLayersModel(tf.io.fromMemory( + * modelTopology, weightSpecs, weightData)); + * ``` + * + * @param modelArtifacts a object containing model topology (i.e., parsed from + * the JSON format). + * @param weightSpecs An array of `WeightsManifestEntry` objects describing the + * names, shapes, types, and quantization of the weight data. + * @param weightData A single `ArrayBuffer` containing the weight data, + * concatenated in the order described by the weightSpecs. + * @param trainingConfig Model training configuration. Optional. + * + * @returns A passthrough `IOHandler` that simply loads the provided data. 
+ */
+declare function fromMemory(modelArtifacts: {} | ModelArtifacts, weightSpecs?: WeightsManifestEntry[], weightData?: ArrayBuffer, trainingConfig?: TrainingConfig): IOHandler;
+
+export declare type Gender = 'male' | 'female' | 'unknown';
+
+/** Generic config type inherited by all module types */
+export declare interface GenericConfig {
+ /** is module enabled? */
+ enabled: boolean;
+ /** path to model json file (relative to `modelBasePath`) */
+ modelPath: string;
+ /** how many max frames to go without re-running model if cached results are acceptable
+ * for two-phase models such as face and hand caching applies to bounding boxes detection only */
+ skipFrames: number;
+ /** how many max milliseconds to go without re-running model if cached results are acceptable
+ * for two-phase models such as face and hand caching applies to bounding boxes detection only */
+ skipTime: number;
+}
+
+/** draw detected gestures */
+declare function gesture(inCanvas: AnyCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Controls gesture detection */
+export declare interface GestureConfig {
+ /** is gesture detection enabled? */
+ enabled: boolean;
+}
+
+/** Gesture combined results
+ * Each result has:
+ * - part: part name and number where gesture was detected: `face`, `iris`, `body`, `hand`
+ * - gesture: gesture detected
+ */
+export declare type GestureResult = {
+ 'face': number;
+ gesture: FaceGesture;
+} | {
+ 'iris': number;
+ gesture: IrisGesture;
+} | {
+ 'body': number;
+ gesture: BodyGesture;
+} | {
+ 'hand': number;
+ gesture: HandGesture;
+};
+
+declare const getLoadHandlers: (url: string | string[], loadOptions?: LoadOptions) => IOHandler[];
+
+/**
+ * Create `ModelArtifacts` from a JSON file.
+ *
+ * @param modelJSON Object containing the parsed JSON of `model.json`
+ * @param loadWeights Function that takes the JSON file's weights manifest,
+ * reads weights from the listed path(s), and returns a Promise of the
+ * weight manifest entries along with the weights data.
+ * @returns A Promise of the `ModelArtifacts`, as described by the JSON file.
+ */
+declare function getModelArtifactsForJSON(modelJSON: ModelJSON, loadWeights: (weightsManifest: WeightsManifestConfig) => Promise<[WeightsManifestEntry[], /* weightData */ ArrayBuffer]>): Promise<ModelArtifacts>;
+
+/**
+ * Populate ModelArtifactsInfo fields for a model with JSON topology.
+ * @param modelArtifacts
+ * @returns A ModelArtifactsInfo object.
+ */
+declare function getModelArtifactsInfoForJSON(modelArtifacts: ModelArtifacts): ModelArtifactsInfo;
+
+declare const getSaveHandlers: (url: string | string[]) => IOHandler[];
+
+declare interface GPUData {
+ tensorRef: Tensor;
+ texture?: WebGLTexture;
+ texShape?: [number, number];
+}
+
+/**
+ * A `tf.GraphModel` is a directed, acyclic graph built from a
+ * SavedModel GraphDef and allows inference execution.
+ *
+ * A `tf.GraphModel` can only be created by loading from a model converted from
+ * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using
+ * the command line converter tool and loaded via `tf.loadGraphModel`. 
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+export declare class GraphModel implements InferenceModel {
+ private modelUrl;
+ private loadOptions;
+ private executor;
+ private version;
+ private handler;
+ private artifacts;
+ private initializer;
+ private resourceManager;
+ private signature;
+ readonly modelVersion: string;
+ readonly inputNodes: string[];
+ readonly outputNodes: string[];
+ readonly inputs: TensorInfo[];
+ readonly outputs: TensorInfo[];
+ readonly weights: NamedTensorsMap;
+ readonly metadata: {};
+ readonly modelSignature: {};
+ /**
+ * @param modelUrl url for the model, or an `io.IOHandler`.
+ * @param weightManifestUrl url for the weight file generated by
+ * scripts/convert.py script.
+ * @param requestOption options for Request, which allows to send credentials
+ * and custom headers.
+ * @param onProgress Optional, progress callback function, fired periodically
+ * before the load is completed.
+ */
+ constructor(modelUrl: string | io.IOHandler, loadOptions?: io.LoadOptions);
+ private findIOHandler;
+ /**
+ * Loads the model and weight files, construct the in memory weight map and
+ * compile the inference graph.
+ */
+ load(): Promise<boolean>;
+ /**
+ * Synchronously construct the in memory weight map and
+ * compile the inference graph. Also initialize hashtable if any.
+ *
+ * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}
+ */
+ loadSync(artifacts: io.ModelArtifacts): boolean;
+ /**
+ * Save the configuration and/or weights of the GraphModel.
+ *
+ * An `IOHandler` is an object that has a `save` method of the proper
+ * signature defined. The `save` method manages the storing or
+ * transmission of serialized data ("artifacts") that represent the
+ * model's topology and weights onto or via a specific medium, such as
+ * file downloads, local storage, IndexedDB in the web browser and HTTP
+ * requests to a server. TensorFlow.js provides `IOHandler`
+ * implementations for a number of frequently used saving mediums, such as
+ * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. See `tf.io`
+ * for more details.
+ *
+ * This method also allows you to refer to certain types of `IOHandler`s
+ * as URL-like string shortcuts, such as 'localstorage://' and
+ * 'indexeddb://'.
+ *
+ * Example 1: Save `model`'s topology and weights to browser [local
+ * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage);
+ * then load it back.
+ *
+ * ```js
+ * const modelUrl =
+ * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json';
+ * const model = await tf.loadGraphModel(modelUrl);
+ * const zeros = tf.zeros([1, 224, 224, 3]);
+ * model.predict(zeros).print();
+ *
+ * const saveResults = await model.save('localstorage://my-model-1');
+ *
+ * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1');
+ * console.log('Prediction from loaded model:');
+ * model.predict(zeros).print();
+ * ```
+ *
+ * @param handlerOrURL An instance of `IOHandler` or a URL-like,
+ * scheme-based string shortcut for `IOHandler`.
+ * @param config Options for saving the model.
+ * @returns A `Promise` of `SaveResult`, which summarizes the result of
+ * the saving, such as byte sizes of the saved artifacts for the model's
+ * topology and weight values.
+ *
+ * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true}
+ */
+ save(handlerOrURL: io.IOHandler | string, config?: io.SaveConfig): Promise<io.SaveResult>;
+ /**
+ * Execute the inference for the input tensors. 
+ *
+ * @param input The input tensors, when there is a single input for the model,
+ * the inputs param should be a `tf.Tensor`. For models with multiple inputs,
+ * the inputs param should be in either `tf.Tensor`[] if the input order is
+ * fixed, or otherwise NamedTensorMap format.
+ *
+ * For models with multiple inputs, we recommend you use NamedTensorMap as the
+ * input type. If you use `tf.Tensor`[], the order of the array needs to
+ * follow the
+ * order of the inputNodes array. @see {@link GraphModel.inputNodes}
+ *
+ * You can also feed any intermediate nodes using the NamedTensorMap as the
+ * input type. For example, given the graph
+ * InputNode => Intermediate => OutputNode,
+ * you can execute the subgraph Intermediate => OutputNode by calling
+ * model.execute({'IntermediateNode': tf.tensor(...)});
+ *
+ * This is useful for models that use tf.dynamic_rnn, where the intermediate
+ * state needs to be fed manually.
+ *
+ * For batch inference execution, the tensors for each input need to be
+ * concatenated together. For example with mobilenet, the required input shape
+ * is [1, 224, 224, 3], which represents the [batch, height, width, channel].
+ * If we provide batched data of 100 images, the input tensor should be
+ * in the shape of [100, 224, 224, 3].
+ *
+ * @param config Prediction configuration for specifying the batch size and
+ * output node names. Currently the batch size option is ignored for graph
+ * model.
+ *
+ * @returns Inference result tensors. The output would be a single `tf.Tensor`
+ * if the model has a single output node, otherwise Tensor[] or NamedTensorMap[]
+ * will be returned for models with multiple outputs.
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ predict(inputs: Tensor | Tensor[] | NamedTensorMap, config?: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap;
+ private normalizeInputs;
+ private normalizeOutputs;
+ /**
+ * Executes inference for the model for given input tensors.
+ * @param inputs tensor, tensor array or tensor map of the inputs for the
+ * model, keyed by the input node names.
+ * @param outputs output node name from the TensorFlow model, if no
+ * outputs are specified, the default outputs of the model will be used.
+ * You can inspect intermediate nodes of the model by adding them to the
+ * outputs array.
+ *
+ * @returns A single tensor if provided with a single output or no outputs
+ * are provided and there is only one default output, otherwise return a
+ * tensor array. The order of the tensor array is the same as the outputs
+ * if provided, otherwise the order of outputNodes attribute of the model.
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Tensor | Tensor[];
+ /**
+ * Executes inference for the model for given input tensors in async
+ * fashion, use this method when your model contains control flow ops.
+ * @param inputs tensor, tensor array or tensor map of the inputs for the
+ * model, keyed by the input node names.
+ * @param outputs output node name from the TensorFlow model, if no outputs
+ * are specified, the default outputs of the model will be used. You can
+ * inspect intermediate nodes of the model by adding them to the outputs
+ * array.
+ *
+ * @returns A Promise of a single tensor if provided with a single output or
+ * no outputs are provided and there is only one default output, otherwise
+ * return a tensor map.
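+ *
+ * A sketch of usage (the output node name here is hypothetical):
+ *
+ * ```js
+ * const out = await model.executeAsync(inputTensor, ['MobilenetV1/Predictions/Softmax']);
+ * ```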
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ executeAsync(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Promise<Tensor | Tensor[]>;
+ /**
+ * Get intermediate tensors for model debugging mode (flag
+ * KEEP_INTERMEDIATE_TENSORS is true).
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ getIntermediateTensors(): NamedTensorsMap;
+ /**
+ * Dispose intermediate tensors for model debugging mode (flag
+ * KEEP_INTERMEDIATE_TENSORS is true).
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ disposeIntermediateTensors(): void;
+ private convertTensorMapToTensorsMap;
+ /**
+ * Releases the memory used by the weight tensors and resourceManager.
+ *
+ * @doc {heading: 'Models', subheading: 'Classes'}
+ */
+ dispose(): void;
+}
+
+/** draw detected hands */
+declare function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Configures all hand detection specific options */
+export declare interface HandConfig extends GenericConfig {
+ /** should hand rotation correction be performed after hand detection? */
+ rotation: boolean;
+ /** minimum confidence for a detected hand before results are discarded */
+ minConfidence: number;
+ /** minimum overlap between two detected hands before one is discarded */
+ iouThreshold: number;
+ /** maximum number of detected hands */
+ maxDetected: number;
+ /** should hand landmarks be detected or just return detected hand box */
+ landmarks: boolean;
+ detector: {
+ /** path to hand detector model json */
+ modelPath?: string;
+ };
+ skeleton: {
+ /** path to hand skeleton model json */
+ modelPath?: string;
+ };
+}
+
+/** hand gesture type */
+export declare type HandGesture = `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} forward` | `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} up` | 'victory' | 'thumbs up';
+
+/** Hand results */
+export declare interface HandResult {
+ /** hand id */
+ id: number;
+ /** overall hand score */
+ score: number;
+ /** hand detection score */
+ boxScore: number;
+ /** hand skeleton score */
+ fingerScore: number;
+ /** detected hand box */
+ box: Box;
+ /** detected hand box normalized to 0..1 */
+ boxRaw: Box;
+ /** detected hand keypoints */
+ keypoints: Array<Point>;
+ /** detected hand class */
+ label: HandType;
+ /** detected hand keypoints combined into annotated parts */
+ annotations: Record>;
+ /** detected hand parts annotated with part gestures */
+ landmarks: Record;
+}
+
+export declare type HandType = 'hand' | 'fist' | 'pinch' | 'point' | 'face' | 'tip' | 'pinchtip';
+
+/**
+ * Creates an IOHandler subtype that sends model artifacts to an HTTP server.
+ *
+ * An HTTP request of the `multipart/form-data` mime type will be sent to the
+ * `path` URL. The form data includes artifacts that represent the topology
+ * and/or weights of the model. In the case of Keras-style `tf.Model`, two
+ * blobs (files) exist in form-data:
+ * - A JSON file consisting of `modelTopology` and `weightsManifest`.
+ * - A binary weights file consisting of the concatenated weight values.
+ * These files are in the same format as the one generated by
+ * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).
+ *
+ * The following code snippet exemplifies the client-side code that uses this
+ * function:
+ *
+ * ```js
+ * const model = tf.sequential();
+ * model.add(
+ *     tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'}));
+ *
+ * const saveResult = await model.save(tf.io.http(
+ *     'http://model-server:5000/upload', {requestInit: {method: 'PUT'}}));
+ * console.log(saveResult);
+ * ```
+ *
+ * If the default `POST` method is to be used, without any custom parameters
+ * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`:
+ *
+ * ```js
+ * const saveResult = await model.save('http://model-server:5000/upload');
+ * ```
+ *
+ * The following GitHub Gist
+ * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864
+ * implements a server based on [flask](https://github.com/pallets/flask) that
+ * can receive the request. Upon receiving the model artifacts via the request,
+ * this particular server reconstitutes instances of [Keras
+ * Models](https://keras.io/models/model/) in memory.
+ *
+ *
+ * @param path A URL path to the model.
+ * Can be an absolute HTTP path (e.g.,
+ * 'http://localhost:8000/model-upload') or a relative path (e.g.,
+ * './model-upload').
+ * @param requestInit Request configurations to be used when sending
+ * HTTP request to server using `fetch`. It can contain fields such as
+ * `method`, `credentials`, `headers`, `mode`, etc. See
+ * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request
+ * for more information. `requestInit` must not have a body, because the
+ * body will be set by TensorFlow.js. File blobs representing the model
+ * topology (filename: 'model.json') and the weights of the model (filename:
+ * 'model.weights.bin') will be appended to the body. If `requestInit` has a
+ * `body`, an Error will be thrown.
+ * @param loadOptions Optional configuration for the loading. It includes the
+ * following fields:
+ * - weightPathPrefix Optional, this specifies the path prefix for weight
+ * files, by default this is calculated from the path param.
+ * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js,
+ * the `fetch` from node-fetch can be used here.
+ * - onProgress Optional, progress callback function, fired periodically
+ * before the load is completed.
+ * @returns An instance of `IOHandler`.
+ *
+ * @doc {
+ *   heading: 'Models',
+ *   subheading: 'Loading',
+ *   namespace: 'io',
+ *   ignoreCI: true
+ * }
+ */
+declare function http(path: string, loadOptions?: LoadOptions): IOHandler;
+
+/** **Human** library main class
+ *
+ * All methods and properties are available only as members of Human class
+ *
+ * - Configuration object definition: {@link Config}
+ * - Results object definition: {@link Result}
+ * - Possible inputs: {@link Input}
+ *
+ * @param userConfig - {@link Config}
+ * @returns instance of {@link Human}
+ */
+declare class Human {
+ #private;
+ /** Current version of Human library in *semver* format */
+ version: string;
+ /** Current configuration
+ * - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
+ */
+ config: Config;
+ /** Last known result of detect run
+ * - Can be accessed anytime after initial detection
+ */
+ result: Result;
+ /** Current state of Human library
+ * - Can be polled to determine operations that are currently executed
+ * - Progresses through: 'config', 'check', 'backend', 'load', 'run:', 'idle'
+ */
+ state: string;
+ /** currently processed image tensor and canvas */
+ process: {
+ tensor: Tensor | null;
+ canvas: AnyCanvas | null;
+ };
+ /** Instance of TensorFlow/JS used by Human
+ * - Can be embedded or externally provided
+ * [TFJS API]: {@link https://js.tensorflow.org/api/latest/}
+ */
+ tf: any;
+ /** Object containing environment information used for diagnostics */
+ env: Env;
+ /** Draw helper classes that can draw detected objects on canvas using specified draw options
+ * - canvas: draws input to canvas
+ * - options: are global settings for all draw operations, can be overridden for each draw method {@link DrawOptions}
+ * - face, body, hand, gesture, object, person: draws detected results as overlays on canvas
+ */
+ draw: {
+ canvas: typeof draw.canvas;
+ face: typeof draw.face;
+ body: typeof draw.body;
+ hand: typeof draw.hand;
+ gesture: typeof draw.gesture;
+ object: typeof draw.object;
+ person: typeof draw.person;
+ all: typeof draw.all;
+ options: DrawOptions;
+ };
+ /** Currently loaded models
+ * @internal
+ * {@link Models}
+ */
+ models: models.Models;
+ /** Container for events dispatched by Human
+ * Possible events:
+ * - `create`: triggered when Human object is instantiated
+ * - `load`: triggered when models are loaded (explicitly or on-demand)
+ * - `image`: triggered when input image is processed
+ * - `result`: triggered when detection is complete
+ * - `warmup`: triggered when warmup is complete
+ * - `error`: triggered on some errors
+ */
+ events: EventTarget | undefined;
+ /** Reference face triangulation array of 468 points, used for triangle references between points */
+ faceTriangulation: number[];
+ /** Reference UV map of 468 values, used for 3D mapping of the face mesh */
+ faceUVMap: [number, number][];
+ /** Performance object that contains values for all recently performed operations */
+ performance: Record<string, number>;
+ /** WebGL debug info */
+ gl: Record<string, unknown>;
+ /** Constructor for **Human** library that is further used for all operations
+ * @param userConfig - user configuration object {@link Config}
+ */
+ constructor(userConfig?: Partial<Config>);
+ /** internal function to measure tensor leaks */
+ analyze: (...msg: string[]) => void;
+ /** Reset configuration to default values */
+ reset(): void;
+ /** Validate current configuration schema */
+ validate(userConfig?: Partial<Config>): {
+ reason: string;
+ where: string;
+ expected?: string;
+ }[];
+ /** Exports face matching methods {@link match#similarity} */
+ similarity: typeof match.similarity;
+ /** Exports face matching methods {@link match#distance} */
+ distance: typeof match.distance;
+ /** Exports face matching methods {@link match#match} */
+ match: typeof match.match;
+ /** Utility wrapper for performance.now() */
+ now(): number;
+ /** Process input and return both canvas and tensor
+ *
+ * @param input - any input {@link Input}
+ * @param getTensor - should image processing also return tensor or just canvas
+ * Returns object with `tensor` and `canvas`
+ */
+ image(input: Input, getTensor?: boolean): Promise<{
+ tensor: Tensor | null;
+ canvas: AnyCanvas | null;
+ }>;
+ /** Segmentation method takes any input and returns processed canvas with body segmentation
+ * - Segmentation is not triggered as part of detect process
+ * @param input - {@link Input}
+ * @param background - {@link Input}
+ * - Optional parameter background is used to fill the background with specific input
+ * Returns:
+ * - `data` as raw data array with per-pixel segmentation values
+ * - `canvas` as canvas which is input image filtered with segmentation data and optionally merged with background image. canvas alpha values are set to segmentation values for easy merging
+ * - `alpha` as grayscale canvas that represents segmentation alpha values
+ */
+ segmentation(input: Input, background?: Input): Promise<{
+ data: number[] | Tensor;
+ canvas: AnyCanvas | null;
+ alpha: AnyCanvas | null;
+ }>;
+ /** Enhance method performs additional enhancements to a previously detected face image for further processing
+ *
+ * @param input - Tensor as provided in human.result.face[n].tensor
+ * @returns Tensor
+ */
+ enhance(input: Tensor): Tensor | null;
+ /** Compare two input tensors for pixel similarity
+ * - use `human.image` to process any valid input and get a tensor that can be used for compare
+ * - when passing manually generated tensors:
+ *   - both input tensors must be in format [1, height, width, 3]
+ *   - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
+ * - return value is pixel similarity score normalized by input resolution and rgb channels
+ */
+ compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number>;
+ /** Explicit backend initialization
+ * - Normally done implicitly during initial load phase
+ * - Call to explicitly register and initialize TFJS backend without any other operations
+ * - Use when changing backend during runtime
+ */
+ init(): Promise<void>;
+ /** Load method preloads all configured models on-demand
+ * - Not explicitly required as any required model is loaded implicitly on its first run
+ *
+ * @param userConfig - {@link Config}
+ */
+ load(userConfig?: Partial<Config>): Promise<void>;
+ /** emit event */
+ emit: (event: string) => void;
+ /** Runs interpolation using last known result and returns smoothed result
+ * Interpolation is based on time since last known result so can be called independently
+ *
+ * @param result - {@link Result} optional use specific result set to run interpolation on
+ * @returns result - {@link Result}
+ */
+ next(result?: Result): Result;
+ /** Warmup method pre-initializes all configured models for faster inference
+ * - can take significant time on startup
+ * - only used for `webgl` and `humangl` backends
+ * @param userConfig - {@link Config}
+ * @returns result - {@link Result}
+ */
+ warmup(userConfig?: Partial<Config>): Promise<Result>;
+ /** Run detect with tensorflow profiling
+ * - result object will contain total execution time information for top-20 kernels
+ * - actual detection object can be accessed via `human.result`
+ */
+ profile(input: Input, userConfig?: Partial<Config>): Promise<Record<string, number>>;
+ /** Main detection method
+ * - Analyze configuration: {@link Config}
+ * - Pre-process input: {@link Input}
+ * - Run inference for all configured models
+ * - Process and return result: {@link Result}
+ *
+ * @param input - {@link Input}
+ * @param userConfig - {@link Config}
+ * @returns result - {@link Result}
+ */
+ detect(input: Input, userConfig?: Partial<Config>): Promise<Result>;
+}
+export { Human }
+export default Human;
+
+/** Defines all possible image objects */
+export declare type ImageObjects = ImageData | ImageBitmap;
+
+/**
+ * Common interface for a machine learning model that can do inference.
+ */
+declare interface InferenceModel {
+ /**
+ * Return the array of input tensor info.
+ */
+ readonly inputs: ModelTensorInfo[];
+ /**
+ * Return the array of output tensor info.
+ */
+ readonly outputs: ModelTensorInfo[];
+ /**
+ * Execute the inference for the input tensors.
+ *
+ * @param input The input tensors, when there is a single input for the model,
+ * the inputs param should be a Tensor. For models with multiple inputs, the
+ * inputs param should be in either Tensor[] if the input order is fixed, or
+ * otherwise NamedTensorMap format.
+ * For batch inference execution, the tensors for each input need to be
+ * concatenated together. For example with mobilenet, the required input shape
+ * is [1, 224, 224, 3], which represents the [batch, height, width, channel].
+ * If we provide batched data of 100 images, the input tensor should be
+ * in the shape of [100, 224, 224, 3].
+ *
+ * @param config Prediction configuration for specifying the batch size.
+ *
+ * @returns Inference result tensors. The output would be a single Tensor if the
+ * model has a single output node, otherwise Tensor[] or NamedTensorMap[] will
+ * be returned for models with multiple outputs.
+ */
+ predict(inputs: Tensor | Tensor[] | NamedTensorMap, config: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap;
+ /**
+ * Executes the inference for the input tensors once and returns activation
+ * values for the specified output node names without batching.
+ *
+ * @param input The input tensors, when there is a single input for the model,
+ * the inputs param should be a Tensor. For models with multiple inputs, the
+ * inputs param should be in either Tensor[] if the input order is fixed, or
+ * otherwise NamedTensorMap format.
+ *
+ * @param outputs string|string[]. List of output node names to retrieve
+ * activation from.
+ *
+ * @returns Activation values for the output nodes result tensors. The return
+ * type matches specified parameter outputs type. The output would be single
+ * Tensor if single output is specified, otherwise Tensor[] for multiple
+ * outputs.
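+ *
+ * For example (a sketch; the node name is hypothetical):
+ *
+ * ```js
+ * const activation = model.execute(inputTensor, 'conv2d_1/Relu');
+ * ```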
+ */ + execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs: string | string[]): Tensor | Tensor[]; +} + +/** Defines all possible input types for **Human** detection */ +export declare type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas; + +declare namespace io { + export { + copyModel, + listModels, + moveModel, + removeModel, + browserFiles, + browserHTTPRequest, + concatenateArrayBuffers, + decodeWeights, + encodeWeights, + fromMemory, + getLoadHandlers, + getModelArtifactsForJSON, + getModelArtifactsInfoForJSON, + getSaveHandlers, + http, + IOHandler, + isHTTPScheme, + LoadHandler, + LoadOptions, + loadWeights, + ModelArtifacts, + ModelArtifactsInfo, + ModelJSON, + ModelStoreManager, + OnProgressCallback, + registerLoadRouter, + registerSaveRouter, + RequestDetails, + SaveConfig, + SaveHandler, + SaveResult, + TrainingConfig, + WeightGroup, + weightsLoaderFactory, + WeightsManifestConfig, + WeightsManifestEntry, + withSaveHandler + } +} + +/** + * Interface for a model import/export handler. + * + * The `save` and `load` handlers are both optional, in order to allow handlers + * that support only saving or loading. + */ +declare interface IOHandler { + save?: SaveHandler; + load?: LoadHandler; +} + +declare type IORouter = (url: string | string[], loadOptions?: LoadOptions) => IOHandler; + +/** iris gesture type */ +export declare type IrisGesture = 'facing center' | `looking ${'left' | 'right' | 'up' | 'down'}` | 'looking center'; + +declare function isHTTPScheme(url: string): boolean; + +/** + * List all models stored in registered storage mediums. + * + * For a web browser environment, the registered mediums are Local Storage and + * IndexedDB. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @returns A `Promise` of a dictionary mapping URLs of existing models to + * their model artifacts info. URLs include medium-specific schemes, e.g., + * 'indexeddb://my/model/1'. Model artifacts info include type of the + * model's topology, byte sizes of the topology, weights, etc. + * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function listModels(): Promise<{ + [url: string]: ModelArtifactsInfo; +}>; + +/** Load method preloads all instance.configured models on-demand */ +declare function load(instance: Human): Promise; + +/** + * Type definition for handlers of loading operations. + */ +declare type LoadHandler = () => Promise; + +/** @innamespace io */ +declare interface LoadOptions { + /** + * RequestInit (options) for HTTP requests. + * + * For detailed information on the supported fields, see + * [https://developer.mozilla.org/en-US/docs/Web/API/Request/Request]( + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request) + */ + requestInit?: RequestInit; + /** + * Progress callback. + */ + onProgress?: OnProgressCallback; + /** + * A function used to override the `window.fetch` function. 
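+ *
+ * For example, in Node.js a `fetch` implementation can be passed through
+ * (a sketch; `node-fetch` is an assumed dependency):
+ *
+ * ```js
+ * const fetchFunc = require('node-fetch');
+ * const model = await tf.loadGraphModel(modelUrl, {fetchFunc});
+ * ```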
+ */
+ fetchFunc?: Function;
+ /**
+ * Strict loading model: whether extraneous weights or missing
+ * weights should trigger an `Error`.
+ *
+ * If `true`, require that the provided weights exactly match those
+ * required by the layers. `false` means that both extra weights
+ * and missing weights will be silently ignored.
+ *
+ * Default: `true`.
+ */
+ strict?: boolean;
+ /**
+ * Path prefix for weight files, by default this is calculated from the
+ * path of the model JSON file.
+ *
+ * For instance, if the path to the model JSON file is
+ * `http://localhost/foo/model.json`, then the default path prefix will be
+ * `http://localhost/foo/`. If a weight file has the path value
+ * `group1-shard1of2` in the weight manifest, then the weight file will be
+ * loaded from `http://localhost/foo/group1-shard1of2` by default. However,
+ * if you provide a `weightPathPrefix` value of
+ * `http://localhost/foo/alt-weights`, then the weight file will be loaded
+ * from the path `http://localhost/foo/alt-weights/group1-shard1of2` instead.
+ */
+ weightPathPrefix?: string;
+ /**
+ * Whether the module or model is to be loaded from TF Hub.
+ *
+ * Setting this to `true` allows passing a TF-Hub module URL, omitting the
+ * standard model file name and the query parameters.
+ *
+ * Default: `false`.
+ */
+ fromTFHub?: boolean;
+ /**
+ * An async function to convert weight file name to URL. The weight file
+ * names are stored in model.json's weightsManifest.paths field. By default we
+ * consider weight files to be colocated with the model.json file. For example:
+ * model.json URL: https://www.google.com/models/1/model.json
+ * group1-shard1of1.bin url:
+ * https://www.google.com/models/1/group1-shard1of1.bin
+ *
+ * With this func you can convert the weight file name to any URL.
+ */
+ weightUrlConverter?: (weightFileName: string) => Promise<string>;
+}
+
+/**
+ * Reads a weights manifest JSON configuration, fetches the weights and
+ * returns them as `Tensor`s.
+ *
+ * @param manifest The weights manifest JSON.
+ * @param filePathPrefix The path prefix for filenames given in the manifest.
+ * Defaults to the empty string.
+ * @param weightNames The names of the weights to be fetched.
+ */
+declare function loadWeights(manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[], requestInit?: RequestInit): Promise<NamedTensorMap>;
+
+declare namespace match {
+ export {
+ distance,
+ similarity,
+ match_2 as match,
+ Descriptor,
+ MatchOptions
+ }
+}
+
+/** Matches given descriptor to the closest entry in an array of descriptors
+ * @param descriptor - face descriptor
+ * @param descriptors - array of face descriptors to compare given descriptor to
+ * @param options - see {@link similarity}
+ * Returns
+ * - `index` array index where best match was found, or -1 if there are no matches
+ * - {@link distance} calculated `distance` of given descriptor to the best match
+ * - {@link similarity} calculated normalized `similarity` of given descriptor to the best match
+ */
+declare function match_2(descriptor: Descriptor, descriptors: Array<Descriptor>, options?: MatchOptions): {
+ index: number;
+ distance: number;
+ similarity: number;
+};
+
+declare type MatchOptions = {
+ order?: number;
+ threshold?: number;
+ multiplier?: number;
+ min?: number;
+ max?: number;
+} | undefined;
+
+/**
+ * The serialized artifacts of a model, including topology and weights.
+ *
+ * The `modelTopology`, `trainingConfig`, `weightSpecs` and `weightData` fields
+ * of this interface are optional, in order to support topology- or weights-only
+ * saving and loading.
+ *
+ * Note this interface is used internally in IOHandlers. For the file format
+ * written to disk as `model.json`, see `ModelJSON`.
+ */
+declare interface ModelArtifacts {
+ /**
+ * Model topology.
+ *
+ * For Keras-style `tf.Model`s, this is a JSON object.
+ * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON
+ * encoding of the `GraphDef` protocol buffer.
+ */
+ modelTopology?: {} | ArrayBuffer;
+ /**
+ * Serialized configuration for the model's training.
+ */
+ trainingConfig?: TrainingConfig;
+ /**
+ * Weight specifications.
+ *
+ * This corresponds to the weightsData below.
+ */
+ weightSpecs?: WeightsManifestEntry[];
+ /**
+ * Binary buffer for all weight values concatenated in the order specified
+ * by `weightSpecs`.
+ */
+ weightData?: ArrayBuffer;
+ /**
+ * Hard-coded format name for models saved from TensorFlow.js or converted
+ * by TensorFlow.js Converter.
+ */
+ format?: string;
+ /**
+ * What library is responsible for originally generating this artifact.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'.
+ */
+ generatedBy?: string;
+ /**
+ * What library or tool is responsible for converting the original model
+ * to this format, applicable only if the model is output by a converter.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'.
+ *
+ * A value of `null` means the model artifacts are generated without any
+ * conversion process (e.g., saved directly from a TensorFlow.js
+ * `tf.LayersModel` instance.)
+ */
+ convertedBy?: string | null;
+ /**
+ * Inputs and outputs signature for saved model.
+ */
+ signature?: {};
+ /**
+ * User-defined metadata about the model.
+ */
+ userDefinedMetadata?: {
+ [key: string]: {};
+ };
+ /**
+ * Initializer for the model.
+ */
+ modelInitializer?: {};
+}
+
+declare interface ModelArtifactsInfo {
+ /**
+ * Timestamp for when the model is saved.
+ */
+ dateSaved: Date;
+ /**
+ * TODO (cais,yassogba) consider removing GraphDef as GraphDefs now
+ * come in a JSON format and none of our IOHandlers support a non json
+ * format. We could consider replacing this with 'Binary' if we want to
+ * allow future handlers to save to non json formats (though they will
+ * probably want more information than 'Binary').
+ *
+ * Type of the model topology
+ *
+ * Possible values:
+ * - JSON: JSON config (human-readable, e.g., Keras JSON).
+ * - GraphDef: TensorFlow
+ * [GraphDef](https://www.tensorflow.org/extend/tool_developers/#graphdef)
+ * protocol buffer (binary).
+ */
+ modelTopologyType: 'JSON' | 'GraphDef';
+ /**
+ * Size of model topology (Keras JSON or GraphDef), in bytes.
+ */
+ modelTopologyBytes?: number;
+ /**
+ * Size of weight specification or manifest, in bytes.
+ */
+ weightSpecsBytes?: number;
+ /**
+ * Size of weight value data, in bytes.
+ */
+ weightDataBytes?: number;
+}
+
+/**
+ * The on-disk format of the `model.json` file.
+ *
+ * TF.js 1.0 always populates the optional fields when writing model.json.
+ * Prior versions did not provide those fields.
+ */
+declare interface ModelJSON {
+ /**
+ * Model topology.
+ *
+ * For Keras-style `tf.Model`s, this is a JSON object.
+ * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON
+ * encoding of the `GraphDef` protocol buffer.
+ */
+ modelTopology: {};
+ /** Model training configuration. */
+ trainingConfig?: TrainingConfig;
+ /**
+ * Weights manifest.
+ *
+ * The weights manifest consists of an ordered list of weight-manifest
+ * groups. Each weight-manifest group consists of a number of weight values
+ * stored in a number of paths. See the documentation of
+ * `WeightsManifestConfig` for more details.
+ */
+ weightsManifest: WeightsManifestConfig;
+ /**
+ * Hard-coded format name for models saved from TensorFlow.js or converted
+ * by TensorFlow.js Converter.
+ */
+ format?: string;
+ /**
+ * What library is responsible for originally generating this artifact.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'.
+ */
+ generatedBy?: string;
+ /**
+ * What library or tool is responsible for converting the original model
+ * to this format, applicable only if the model is output by a converter.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'.
+ *
+ * A value of `null` means the model artifacts are generated without any
+ * conversion process (e.g., saved directly from a TensorFlow.js
+ * `tf.LayersModel` instance.)
+ */
+ convertedBy?: string | null;
+ /**
+ * Inputs and outputs signature for saved model.
+ */
+ signature?: {};
+ /**
+ * User-defined metadata about the model.
+ */
+ userDefinedMetadata?: {
+ [key: string]: {};
+ };
+ /**
+ * Initializer for the model.
+ */
+ modelInitializer?: {};
+}
+
+declare interface ModelPredictConfig {
+ /**
+ * Optional. Batch size (Integer). If unspecified, it will default to 32.
+ */
+ batchSize?: number;
+ /**
+ * Optional. Verbosity mode. Defaults to false.
+ */
+ verbose?: boolean;
+}
+
+/** Instances of all possible TFJS Graph Models used by Human
+ * - loaded as needed based on configuration
+ * - initialized explicitly with `human.load()` method
+ * - initialized implicitly on first call to `human.detect()`
+ * - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise` if loading
+ */
+export declare class Models {
+ ssrnetage: null | GraphModel | Promise<GraphModel>;
+ gear: null | GraphModel | Promise<GraphModel>;
+ blazeposedetect: null | GraphModel | Promise<GraphModel>;
+ blazepose: null | GraphModel | Promise<GraphModel>;
+ centernet: null | GraphModel | Promise<GraphModel>;
+ efficientpose: null | GraphModel | Promise<GraphModel>;
+ mobilefacenet: null | GraphModel | Promise<GraphModel>;
+ emotion: null | GraphModel | Promise<GraphModel>;
+ facedetect: null | GraphModel | Promise<GraphModel>;
+ faceiris: null | GraphModel | Promise<GraphModel>;
+ facemesh: null | GraphModel | Promise<GraphModel>;
+ faceres: null | GraphModel | Promise<GraphModel>;
+ ssrnetgender: null | GraphModel | Promise<GraphModel>;
+ handpose: null | GraphModel | Promise<GraphModel>;
+ handskeleton: null | GraphModel | Promise<GraphModel>;
+ handtrack: null | GraphModel | Promise<GraphModel>;
+ liveness: null | GraphModel | Promise<GraphModel>;
+ movenet: null | GraphModel | Promise<GraphModel>;
+ nanodet: null | GraphModel | Promise<GraphModel>;
+ posenet: null | GraphModel | Promise<GraphModel>;
+ segmentation: null | GraphModel | Promise<GraphModel>;
+ antispoof: null | GraphModel | Promise<GraphModel>;
+}
+
+declare namespace models {
+ export {
+ reset,
+ load,
+ validate,
+ Models
+ }
+}
+
+/**
+ * An interface for the manager of a model store.
+ *
+ * A model store is defined as a storage medium on which multiple models can
+ * be stored. Each stored model has a unique `path` as its identifier.
+ * A `ModelStoreManager` for the store allows actions including
+ *
+ * - Listing the models stored in the store.
+ * - Deleting a model from the store.
+ */
+declare interface ModelStoreManager {
+ /**
+ * List all models in the model store.
+ * + * @returns A dictionary mapping paths of existing models to their + * model artifacts info. Model artifacts info include type of the model's + * topology, byte sizes of the topology, weights, etc. + */ + listModels(): Promise<{ + [path: string]: ModelArtifactsInfo; + }>; + /** + * Remove a model specified by `path`. + * + * @param path + * @returns ModelArtifactsInfo of the deleted model (if and only if deletion + * is successful). + * @throws Error if deletion fails, e.g., if no model exists at `path`. + */ + removeModel(path: string): Promise; +} + +/** + * Interface for model input/output tensor info. + */ +declare interface ModelTensorInfo { + name: string; + shape?: number[]; + dtype: DataType; + tfDtype?: string; +} + +/** + * Move a model from one URL to another. + * + * This function supports: + * + * 1. Moving within a storage medium, e.g., + * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Moving between two storage mediums, e.g., + * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Move the model, from Local Storage to IndexedDB. + * await tf.io.moveModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove the moved model. + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of moving. + * @param destURL Destination URL of moving. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. 
+ * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function moveModel(sourceURL: string, destURL: string): Promise; + +declare interface NamedTensor { + name: string; + tensor: Tensor; +} + +/** @docalias {[name: string]: Tensor} */ +declare type NamedTensorMap = { + [name: string]: Tensor; +}; + +declare type NamedTensorsMap = { + [key: string]: Tensor[]; +}; + +declare type NumericDataType = 'float32' | 'int32' | 'bool' | 'complex64'; + +/** draw detected objects */ +declare function object(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial): Promise; + +/** Configures all object detection specific options */ +export declare interface ObjectConfig extends GenericConfig { + /** minimum confidence for a detected objects before results are discarded */ + minConfidence: number; + /** minimum overlap between two detected objects before one is discarded */ + iouThreshold: number; + /** maximum number of detected objects */ + maxDetected: number; +} + +/** Object results */ +export declare interface ObjectResult { + /** object id */ + id: number; + /** object detection score */ + score: number; + /** detected object class id */ + class: number; + /** detected object class name */ + label: ObjectType; + /** detected object box */ + box: Box; + /** detected object box normalized to 0..1 */ + boxRaw: Box; +} + +export declare type ObjectType = 'person' | 'bicycle' | 'car' | 'motorcycle' | 'airplane' | 'bus' | 'train' | 'truck' | 'boat' | 'traffic light' | 'fire hydrant' | 'stop sign' | 'parking meter' | 'bench' | 'bird' | 'cat' | 'dog' | 'horse' | 'sheep' | 'cow' | 'elephant' | 'bear' | 'zebra' | 'giraffe' | 'backpack' | 'umbrella' | 'handbag' | 'tie' | 'suitcase' | 'frisbee' | 'skis' | 'snowboard' | 'sports ball' | 'kite' | 'baseball bat' | 'baseball glove' | 'skateboard' | 'surfboard' | 'tennis racket' | 'bottle' | 'wine glass' | 'cup' | 'fork' | 'knife' | 'spoon' | 'bowl' | 'banana' | 'apple' | 'sandwich' | 'orange' | 'broccoli' | 'carrot' | 'hot dog' | 'pizza' | 'donut' | 'cake' | 'chair' | 'couch' | 'potted plant' | 'bed' | 'dining table' | 'toilet' | 'tv' | 'laptop' | 'mouse' | 'remote' | 'keyboard' | 'cell phone' | 'microwave' | 'oven' | 'toaster' | 'sink' | 'refrigerator' | 'book' | 'clock' | 'vase' | 'scissors' | 'teddy bear' | 'hair drier' | 'toothbrush'; + +/** + * Callback for the progress of a long-running action such as an HTTP + * request for a large binary object. + * + * `fraction` should be a number in the [0, 1] interval, indicating how + * much of the action has completed. 
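+ *
+ * A sketch of wiring such a callback into a model load (the URL is a
+ * placeholder):
+ *
+ * ```js
+ * const model = await tf.loadGraphModel('https://example.com/model.json', {
+ *   onProgress: (fraction) => console.log(`loaded: ${(100 * fraction).toFixed(0)}%`),
+ * });
+ * ```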
+ */
+declare type OnProgressCallback = (fraction: number) => void;
+
+/** currently set draw options {@link DrawOptions} */
+declare const options: DrawOptions;
+
+/** draw combined person results instead of individual detection result objects */
+declare function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Person getter
+ * - Triggers combining all individual results into a virtual person object
+ */
+export declare interface PersonResult {
+ /** person id */
+ id: number;
+ /** face result that belongs to this person */
+ face: FaceResult;
+ /** body result that belongs to this person */
+ body: BodyResult | null;
+ /** left and right hand results that belong to this person */
+ hands: {
+ left: HandResult | null;
+ right: HandResult | null;
+ };
+ /** detected gestures specific to this person */
+ gestures: Array<GestureResult>;
+ /** box that defines the person */
+ box: Box;
+ /** box that defines the person normalized to 0..1 */
+ boxRaw?: Box;
+}
+
+/** generic point as [x, y, z?] */
+export declare type Point = [number, number, number?];
+
+export declare type Race = 'white' | 'black' | 'asian' | 'indian' | 'other';
+
+export declare enum Rank {
+ R0 = "R0",
+ R1 = "R1",
+ R2 = "R2",
+ R3 = "R3",
+ R4 = "R4",
+ R5 = "R5",
+ R6 = "R6"
+}
+
+declare interface RecursiveArray<T> {
+ [index: number]: T | RecursiveArray<T>;
+}
+
+declare const registerLoadRouter: (loadRouter: IORouter) => void;
+
+declare const registerSaveRouter: (saveRouter: IORouter) => void;
+
+/**
+ * Remove a model specified by URL from a registered storage medium.
+ *
+ * ```js
+ * // First create and save a model.
+ * const model = tf.sequential();
+ * model.add(tf.layers.dense(
+ *     {units: 1, inputShape: [10], activation: 'sigmoid'}));
+ * await model.save('localstorage://demo/management/model1');
+ *
+ * // Then list existing models.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Delete the model.
+ * await tf.io.removeModel('localstorage://demo/management/model1');
+ *
+ * // List models again.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ * ```
+ *
+ * @param url A URL to a stored model, with a scheme prefix, e.g.,
+ * 'localstorage://my-model-1', 'indexeddb://my/model/2'.
+ * @returns ModelArtifactsInfo of the deleted model (if and only if deletion
+ * is successful).
+ * @throws Error if deletion fails, e.g., if no model exists at `path`.
+ *
+ * @doc {
+ *   heading: 'Models',
+ *   subheading: 'Management',
+ *   namespace: 'io',
+ *   ignoreCI: true
+ * }
+ */
+declare function removeModel(url: string): Promise<ModelArtifactsInfo>;
+
+/**
+ * Additional options for Platform.fetch
+ */
+declare interface RequestDetails {
+ /**
+ * Is this request for a binary file (as opposed to a json file)
+ */
+ isBinary?: boolean;
+}
+
+declare function reset(instance: Human): void;
+
+/**
+ * Result interface definition for **Human** library
+ *
+ * Contains all possible detection results
+ */
+export declare interface Result {
+ /** {@link FaceResult}: detection & analysis results */
+ face: Array<FaceResult>;
+ /** {@link BodyResult}: detection & analysis results */
+ body: Array<BodyResult>;
+ /** {@link HandResult}: detection & analysis results */
+ hand: Array<HandResult>;
+ /** {@link GestureResult}: detection & analysis results */
+ gesture: Array<GestureResult>;
+ /** {@link ObjectResult}: detection & analysis results */
+ object: Array<ObjectResult>;
+ /** global performance object with timing values for each operation */
+ performance: Record<string, number>;
+ /** optional processed canvas that can be used to draw input on screen */
+ canvas?: AnyCanvas | null;
+ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
+ readonly timestamp: number;
+ /** getter property that returns unified persons object */
+ persons: Array<PersonResult>;
+ /** Last known error message */
+ error: string | null;
+}
+
+/**
+ * Options for saving a model.
+ * @innamespace io
+ */
+declare interface SaveConfig {
+ /**
+ * Whether to save only the trainable weights of the model, ignoring the
+ * non-trainable ones.
+ */
+ trainableOnly?: boolean;
+ /**
+ * Whether the optimizer will be saved (if exists).
+ *
+ * Default: `false`.
+ */
+ includeOptimizer?: boolean;
+}
+
+/**
+ * Type definition for handlers of saving operations.
+ */
+declare type SaveHandler = (modelArtifact: ModelArtifacts) => Promise<SaveResult>;
+
+/**
+ * Result of a saving operation.
+ */
+declare interface SaveResult {
+ /**
+ * Information about the model artifacts saved.
+ */
+ modelArtifactsInfo: ModelArtifactsInfo;
+ /**
+ * HTTP responses from the server that handled the model-saving request (if
+ * any). This is applicable only to server-based saving routes.
+ */
+ responses?: Response[];
+ /**
+ * Error messages and related data (if any).
+ */
+ errors?: Array<{} | string>;
+}
+
+/** Configures the body segmentation module
+ * - removes background from input containing a person
+ * - if segmentation is enabled it will run as a preprocessing task before any other model
+ * - alternatively leave it disabled and use it on-demand via the human.segmentation method, which can
+ *   remove background or replace it with a user-provided background
+ */
+export declare interface SegmentationConfig extends GenericConfig {
+ /** blur segmentation output by pixels for more realistic image */
+ blur: number;
+}
+
+/**
+ * @license
+ * Copyright 2017 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+/// 
+/** @docalias number[] */
+declare interface ShapeMap {
+ R0: number[];
+ R1: [number];
+ R2: [number, number];
+ R3: [number, number, number];
+ R4: [number, number, number, number];
+ R5: [number, number, number, number, number];
+ R6: [number, number, number, number, number, number];
+}
+
+/** Calculates normalized similarity between two face descriptors based on their `distance`
+ * @param options - calculation options
+ * - order - algorithm to use
+ *   Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
+ * - multiplier - by how much to enhance difference analysis in range of 1..100
+ *   default is 20, which normalizes results so that similarity above 0.5 can be considered a match
+ * - min - normalize similarity result to a given range
+ * - max - normalize similarity result to a given range
+ *   default is 0.2...0.8
+ * Returns similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity
+ */
+declare function similarity(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number;
+
+declare interface SingleValueMap {
+ bool: boolean;
+ int32: number;
+ float32: number;
+ complex64: number;
+ string: string;
+}
+
+export declare namespace Tensor { }
+
+/**
+ * A `tf.Tensor` object represents an immutable, multidimensional array of
+ * numbers that has a shape and a data type.
+ *
+ * For performance reasons, functions that create tensors do not necessarily
+ * perform a copy of the data passed to them (e.g. if the data is passed as a
+ * `Float32Array`), and changes to the data will change the tensor. This is not
+ * a feature and is not supported. To avoid this behavior, use the tensor before
+ * changing the input data or create a copy with `copy = tf.add(yourTensor, 0)`.
+ *
+ * See `tf.tensor` for details on how to create a `tf.Tensor`.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+export declare class Tensor<R extends Rank = Rank> {
+ /** Unique id of this tensor. */
+ readonly id: number;
+ /**
+ * Id of the bucket holding the data for this tensor. Multiple arrays can
+ * point to the same bucket (e.g. when calling array.reshape()).
+ */
+ dataId: DataId;
+ /** The shape of the tensor. */
+ readonly shape: ShapeMap[R];
+ /** Number of elements in the tensor. */
+ readonly size: number;
+ /** The data type for the array. */
+ readonly dtype: DataType;
+ /** The rank type for the array (see `Rank` enum). */
+ readonly rankType: R;
+ /** Whether this tensor has been globally kept. */
+ kept: boolean;
+ /** The id of the scope this tensor is being tracked in. */
+ scopeId: number;
+ /**
+ * Number of elements to skip in each dimension when indexing. See
+ * https://docs.scipy.org/doc/numpy/reference/generated/\
+ * numpy.ndarray.strides.html
+ */
+ readonly strides: number[];
+ constructor(shape: ShapeMap[R], dtype: DataType, dataId: DataId, id: number);
+ readonly rank: number;
+ /**
+ * Returns a promise of `tf.TensorBuffer` that holds the underlying data.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ buffer<D extends DataType = 'float32'>(): Promise<TensorBuffer<R, D>>;
+ /**
+ * Returns a `tf.TensorBuffer` that holds the underlying data.
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ bufferSync<D extends DataType = 'float32'>(): TensorBuffer<R, D>;
+ /**
+ * Returns the tensor data as a nested array. The transfer of data is done
+ * asynchronously.
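+ *
+ * For example, reading back a 2x2 tensor as a nested array:
+ *
+ * ```js
+ * const t = tf.tensor2d([[1, 2], [3, 4]]);
+ * const nested = await t.array(); // [[1, 2], [3, 4]]
+ * ```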
+ * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + array(): Promise; + /** + * Returns the tensor data as a nested array. The transfer of data is done + * synchronously. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + arraySync(): ArrayMap[R]; + /** + * Asynchronously downloads the values from the `tf.Tensor`. Returns a + * promise of `TypedArray` that resolves when the computation has finished. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + data(): Promise; + /** + * Copy the tensor's data to a new GPU resource. Comparing to the `dataSync()` + * and `data()`, this method prevents data from being downloaded to CPU. + * + * For WebGL backend, the data will be stored on a densely packed texture. + * This means that the texture will use the RGBA channels to store value. + * + * @param options: + * For WebGL, + * - customTexShape: Optional. If set, will use the user defined + * texture shape to create the texture. + * + * @returns For WebGL backend, a GPUData contains the new texture and + * its information. + * { + * tensorRef: The tensor that is associated with this texture, + * texture: WebGLTexture, + * texShape: [number, number] // [height, width] + * } + * Remember to dispose the GPUData after it is used by + * `res.tensorRef.dispose()`. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dataToGPU(options?: DataToGPUOptions): GPUData; + /** + * Synchronously downloads the values from the `tf.Tensor`. This blocks the + * UI thread until the values are ready, which can cause performance issues. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dataSync(): DataTypeMap[D]; + /** Returns the underlying bytes of the tensor's data. */ + bytes(): Promise; + /** + * Disposes `tf.Tensor` from memory. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dispose(): void; + protected isDisposedInternal: boolean; + readonly isDisposed: boolean; + throwIfDisposed(): void; + /** + * Prints the `tf.Tensor`. See `tf.print` for details. + * + * @param verbose Whether to print verbose information about the tensor, + * including dtype and size. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + print(verbose?: boolean): void; + /** + * Returns a copy of the tensor. See `tf.clone` for details. + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + clone(this: T): T; + /** + * Returns a human-readable description of the tensor. Useful for logging. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + toString(verbose?: boolean): string; + variable(trainable?: boolean, name?: string, dtype?: DataType): Variable; +} + +/** + * A mutable object, similar to `tf.Tensor`, that allows users to set values + * at locations before converting to an immutable `tf.Tensor`. + * + * See `tf.buffer` for creating a tensor buffer. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ +declare class TensorBuffer { + dtype: D; + size: number; + shape: ShapeMap[R]; + strides: number[]; + values: DataTypeMap[D]; + constructor(shape: ShapeMap[R], dtype: D, values?: DataTypeMap[D]); + /** + * Sets a value in the buffer at a given location. + * + * @param value The value to set. + * @param locs The location indices. + * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + set(value: SingleValueMap[D], ...locs: number[]): void; + /** + * Returns the value in the buffer at the provided location. + * + * @param locs The location indices. 
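+ *
+ * For example, paired with `set` (the shape here is illustrative):
+ *
+ * ```js
+ * const buf = tf.buffer([2, 2]);
+ * buf.set(3, 0, 0);
+ * console.log(buf.get(0, 0)); // 3
+ * ```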
+ * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + get(...locs: number[]): SingleValueMap[D]; + locToIndex(locs: number[]): number; + indexToLoc(index: number): number[]; + readonly rank: number; + /** + * Creates an immutable `tf.Tensor` object from the buffer. + * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + toTensor(): Tensor; +} + +declare interface TensorInfo { + name: string; + shape?: number[]; + dtype: DataType; +} + +/** @docalias TypedArray|Array */ +export declare type TensorLike = TypedArray | number | boolean | string | RecursiveArray | RecursiveArray | RecursiveArray | Uint8Array[]; + +/** Model training configuration. */ +declare interface TrainingConfig { + /** Optimizer used for the model training. */ + optimizer_config: {}; + /** Loss function(s) for the model's output(s). */ + loss: string | string[] | { + [key: string]: string; + }; + /** Metric function(s) for the model's output(s). */ + metrics?: string[] | { + [key: string]: string; + }; + weighted_metrics?: string[]; + sample_weight_mode?: string; + loss_weights?: number[] | { + [key: string]: number; + }; +} + +declare type TypedArray = Float32Array | Int32Array | Uint8Array; + +declare function validate(instance: Human): Promise; + +/** + * A mutable `tf.Tensor`, useful for persisting state, e.g. for training. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ +declare class Variable extends Tensor { + trainable: boolean; + name: string; + constructor(initialValue: Tensor, trainable: boolean, name: string, tensorId: number); + /** + * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have + * the same shape and dtype as the old `tf.Tensor`. + * + * @param newValue New tensor to be assigned to this variable. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + assign(newValue: Tensor): void; + dispose(): void; +} + +/** Possible values for `human.warmup` */ +export declare type WarmupType = ['' | 'none' | 'face' | 'full' | 'body']; + +/** + * Group to which the weight belongs. + * + * - 'optimizer': Weight from a stateful optimizer. + */ +declare type WeightGroup = 'model' | 'optimizer'; + +/** + * Creates a function, which reads a weights manifest JSON configuration, + * fetches the weight files using the specified function and returns them as + * `Tensor`s. + * + * ```js + * // example for creating a nodejs weight loader, which reads the weight files + * // from disk using fs.readFileSync + * + * import * as fs from 'fs' + * + * const fetchWeightsFromDisk = (filePaths: string[]) => + * filePaths.map(filePath => fs.readFileSync(filePath).buffer) + * + * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk) + * + * const manifest = JSON.parse( + * fs.readFileSync('./my_model-weights_manifest').toString() + * ) + * const weightMap = await loadWeights(manifest, './') + * ``` + * @param fetchWeightsFunction The function used for fetching the weight files. + * @returns Weight loading function. + */ +declare function weightsLoaderFactory(fetchWeightsFunction: (fetchUrls: string[]) => Promise): (manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[]) => Promise; + +/** + * A weight manifest. + * + * The weight manifest consists of an ordered list of weight-manifest groups. + * Each weight-manifest group ("group" for short hereafter) consists of a + * number of weight values stored in a number of paths. + * See the documentation of `WeightManifestGroupConfig` below for more details. 
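+ *
+ * A sketch of the shape of such a manifest (names and paths are illustrative):
+ *
+ * ```js
+ * const manifest = [{
+ *   paths: ['group1-shard1of1.bin'],
+ *   weights: [{name: 'Dense_1/kernel', shape: [10, 1], dtype: 'float32'}],
+ * }];
+ * ```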
+ */ +declare type WeightsManifestConfig = WeightsManifestGroupConfig[]; + +/** + * An entry in the weight manifest. + * + * The entry contains specification of a weight. + */ +declare interface WeightsManifestEntry { + /** + * Name of the weight, e.g., 'Dense_1/bias' + */ + name: string; + /** + * Shape of the weight. + */ + shape: number[]; + /** + * Data type of the weight. + */ + dtype: 'float32' | 'int32' | 'bool' | 'string' | 'complex64'; + /** + * Type of the weight. + * + * Optional. + * + * The value 'optimizer' indicates the weight belongs to an optimizer + * (i.e., used only during model training and not during inference). + */ + group?: WeightGroup; + /** + * Information for dequantization of the weight. + */ + quantization?: { + scale?: number; + min?: number; + dtype: 'uint16' | 'uint8' | 'float16'; + }; +} + +/** + * A weight-manifest group. + * + * Consists of an ordered list of weight values encoded in binary format, + * stored in an ordered list of paths. + */ +declare interface WeightsManifestGroupConfig { + /** + * An ordered list of paths. + * + * Paths are intentionally abstract in order to be general. For example, they + * can be relative URL paths or relative paths on the file system. + */ + paths: string[]; + /** + * Specifications of the weights stored in the paths. + */ + weights: WeightsManifestEntry[]; +} + +/** + * Creates an IOHandler that passes saved model artifacts to a callback. + * + * ```js + * function handleSave(artifacts) { + * // ... do something with the artifacts ... + * return {modelArtifactsInfo: {...}, ...}; + * } + * + * const saveResult = model.save(tf.io.withSaveHandler(handleSave)); + * ``` + * + * @param saveHandler A function that accepts a `ModelArtifacts` and returns a + * `SaveResult`. + */ +declare function withSaveHandler(saveHandler: (artifacts: ModelArtifacts) => Promise): IOHandler; + +export { } diff --git a/dist/human.esm-nobundle.d.ts b/dist/human.esm-nobundle.d.ts new file mode 100644 index 00000000..fca0ad10 --- /dev/null +++ b/dist/human.esm-nobundle.d.ts @@ -0,0 +1,2520 @@ +/// + +/** meta-function that performs draw for: canvas, face, body, hand */ +declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial): Promise<[void, void, void, void, void] | null>; + +/** Defines all possible canvas types */ +export declare type AnyCanvas = HTMLCanvasElement | OffscreenCanvas; + +/** Defines all possible image types */ +export declare type AnyImage = HTMLImageElement | typeof Image; + +/** Defines all possible video types */ +export declare type AnyVideo = HTMLMediaElement | HTMLVideoElement; + +/** @docalias number[] */ +declare interface ArrayMap { + R0: number; + R1: number[]; + R2: number[][]; + R3: number[][][]; + R4: number[][][][]; + R5: number[][][][][]; + R6: number[][][][][][]; +} + +/** Possible TensorFlow backends */ +export declare type BackendType = ['cpu', 'wasm', 'webgl', 'humangl', 'tensorflow', 'webgpu']; + +/** draw detected bodies */ +declare function body(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial): Promise; + +export declare type BodyAnnotation = BodyAnnotationBlazePose | BodyAnnotationEfficientPose; + +export declare type BodyAnnotationBlazePose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'leftEye' | 'rightEye' | 'mouth'; + +export declare type BodyAnnotationEfficientPose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'head'; + +/** Configures all body detection specific options */ +export declare interface BodyConfig extends GenericConfig { 
+    /** maximum number of detected bodies */
+    maxDetected: number;
+    /** minimum confidence for a detected body before results are discarded */
+    minConfidence: number;
+}
+
+/** body gesture type */
+export declare type BodyGesture = `leaning ${'left' | 'right'}` | `raise ${'left' | 'right'} hand` | 'i give up';
+
+/** Body Result keypoints */
+export declare interface BodyKeypoint {
+    /** body part name */
+    part: BodyLandmark;
+    /** body part position */
+    position: Point;
+    /** body part position normalized to 0..1 */
+    positionRaw: Point;
+    /** body part position relative to body center in meters */
+    distance?: Point;
+    /** body part detection score */
+    score: number;
+}
+
+export declare type BodyLandmark = BodyLandmarkPoseNet | BodyLandmarkMoveNet | BodyLandmarkEfficientNet | BodyLandmarkBlazePose;
+
+export declare type BodyLandmarkBlazePose = 'nose' | 'leftEyeInside' | 'leftEye' | 'leftEyeOutside' | 'rightEyeInside' | 'rightEye' | 'rightEyeOutside' | 'leftEar' | 'rightEar' | 'leftMouth' | 'rightMouth' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftPinky' | 'rightPinky' | 'leftIndex' | 'rightIndex' | 'leftThumb' | 'rightThumb' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle' | 'leftHeel' | 'rightHeel' | 'leftFoot' | 'rightFoot' | 'bodyCenter' | 'bodyTop' | 'leftPalm' | 'leftHand' | 'rightPalm' | 'rightHand';
+
+export declare type BodyLandmarkEfficientNet = 'head' | 'neck' | 'rightShoulder' | 'rightElbow' | 'rightWrist' | 'chest' | 'leftShoulder' | 'leftElbow' | 'leftWrist' | 'bodyCenter' | 'rightHip' | 'rightKnee' | 'rightAnkle' | 'leftHip' | 'leftKnee' | 'leftAnkle';
+
+export declare type BodyLandmarkMoveNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle';
+
+export declare type BodyLandmarkPoseNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle';
+
+/** Body results */
+export declare interface BodyResult {
+    /** body id */
+    id: number;
+    /** body detection score */
+    score: number;
+    /** detected body box */
+    box: Box;
+    /** detected body box normalized to 0..1 */
+    boxRaw: Box;
+    /** detected body keypoints */
+    keypoints: Array<BodyKeypoint>;
+    /** detected body keypoints combined into annotated parts */
+    annotations: Record<BodyAnnotation, Point[][]>;
+}
+
+/** generic box as [x, y, width, height] */
+export declare type Box = [number, number, number, number];
+
+/**
+ * Creates an IOHandler that loads model artifacts from user-selected files.
+ *
+ * This method can be used for loading from files such as user-selected files
+ * in the browser.
+ * When used in conjunction with `tf.loadLayersModel`, an instance of
+ * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts.
+ *
+ * ```js
+ * // Note: This code snippet won't run properly without the actual file input
+ * // elements in the HTML DOM.
+ *
+ * // Suppose there are two HTML file input (`<input type="file">`)
+ * // elements.
+ * const uploadJSONInput = document.getElementById('upload-json'); + * const uploadWeightsInput = document.getElementById('upload-weights'); + * const model = await tf.loadLayersModel(tf.io.browserFiles( + * [uploadJSONInput.files[0], uploadWeightsInput.files[0]])); + * ``` + * + * @param files `File`s to load from. Currently, this function supports only + * loading from files that contain Keras-style models (i.e., `tf.Model`s), for + * which an `Array` of `File`s is expected (in that order): + * - A JSON file containing the model topology and weight manifest. + * - Optionally, One or more binary files containing the binary weights. + * These files must have names that match the paths in the `weightsManifest` + * contained by the aforementioned JSON file, or errors will be thrown + * during loading. These weights files have the same format as the ones + * generated by `tensorflowjs_converter` that comes with the `tensorflowjs` + * Python PIP package. If no weights files are provided, only the model + * topology will be loaded from the JSON file above. + * @returns An instance of `Files` `IOHandler`. + * + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function browserFiles(files: File[]): IOHandler; + +/** + * Deprecated. Use `tf.io.http`. + * @param path + * @param loadOptions + */ +declare function browserHTTPRequest(path: string, loadOptions?: LoadOptions): IOHandler; + +/** draw processed canvas */ +declare function canvas(input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas): Promise; + +/** + * Concatenate a number of ArrayBuffers into one. + * + * @param buffers A number of array buffers to concatenate. + * @returns Result of concatenating `buffers` in order. 
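+ *
+ * Behavior sketch (hypothetical buffers, not from the library docs):
+ *
+ * ```js
+ * const a = new Uint8Array([1, 2]).buffer;
+ * const b = new Uint8Array([3]).buffer;
+ * const c = concatenateArrayBuffers([a, b]); // ArrayBuffer containing bytes [1, 2, 3]
+ * ```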
+ */ +declare function concatenateArrayBuffers(buffers: ArrayBuffer[]): ArrayBuffer; + +/** + * Configuration interface definition for **Human** library + * Contains all configurable parameters + * Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) + */ +export declare interface Config { + /** Backend used for TFJS operations + * valid build-in backends are: + * - Browser: `cpu`, `wasm`, `webgl`, `humangl`, `webgpu` + * - NodeJS: `cpu`, `wasm`, `tensorflow` + * default: `humangl` for browser and `tensorflow` for nodejs + */ + backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu'; + /** Path to *.wasm files if backend is set to `wasm` + * + * default: auto-detects to link to CDN `jsdelivr` when running in browser + */ + wasmPath: string; + /** Print debug statements to console + * + * default: `true` + */ + debug: boolean; + /** Perform model loading and inference concurrently or sequentially + * + * default: `true` + */ + async: boolean; + /** What to use for `human.warmup()` + * - warmup pre-initializes all models for faster inference but can take significant time on startup + * - used by `webgl`, `humangl` and `webgpu` backends + * + * default: `full` + */ + warmup: '' | 'none' | 'face' | 'full' | 'body'; + /** Base model path (typically starting with file://, http:// or https://) for all models + * - individual modelPath values are relative to this path + * + * default: `../models/` for browsers and `file://models/` for nodejs + */ + modelBasePath: string; + /** Cache models in IndexDB on first sucessfull load + * default: true if indexdb is available (browsers), false if its not (nodejs) + */ + cacheModels: boolean; + /** Cache sensitivity + * - values 0..1 where 0.01 means reset cache if input changed more than 1% + * - set to 0 to disable caching + * + * default: 0.7 + */ + cacheSensitivity: number; + /** Perform immediate garbage collection on deallocated tensors instead of caching them */ + deallocate: boolean; + /** Internal Variable */ + skipAllowed: boolean; + /** Filter config {@link FilterConfig} */ + filter: Partial; + /** Gesture config {@link GestureConfig} */ + gesture: Partial; + /** Face config {@link FaceConfig} */ + face: Partial; + /** Body config {@link BodyConfig} */ + body: Partial; + /** Hand config {@link HandConfig} */ + hand: Partial; + /** Object config {@link ObjectConfig} */ + object: Partial; + /** Segmentation config {@link SegmentationConfig} */ + segmentation: Partial; +} + +/** + * Copy a model from one URL to another. + * + * This function supports: + * + * 1. Copying within a storage medium, e.g., + * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Copying between two storage mediums, e.g., + * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Copy the model, from Local Storage to IndexedDB. + * await tf.io.copyModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove both models. 
+ * await tf.io.removeModel('localstorage://demo/management/model1'); + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of copying. + * @param destURL Destination URL of copying. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function copyModel(sourceURL: string, destURL: string): Promise; + +/** + * We wrap data id since we use weak map to avoid memory leaks. + * Since we have our own memory management, we have a reference counter + * mapping a tensor to its data, so there is always a pointer (even if that + * data is otherwise garbage collectable). + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/ + * Global_Objects/WeakMap + */ +declare type DataId = object; + +declare type DataToGPUOptions = DataToGPUWebGLOption; + +declare interface DataToGPUWebGLOption { + customTexShape?: [number, number]; +} + +/** @docalias 'float32'|'int32'|'bool'|'complex64'|'string' */ +declare type DataType = keyof DataTypeMap; + +declare interface DataTypeMap { + float32: Float32Array; + int32: Int32Array; + bool: Uint8Array; + complex64: Float32Array; + string: string[]; +} + +/** + * Decode flat ArrayBuffer as weights. + * + * This function does not handle sharding. + * + * This function is the reverse of `encodeWeights`. + * + * @param buffer A flat ArrayBuffer carrying the binary values of the tensors + * concatenated in the order specified in `specs`. + * @param specs Specifications of the names, dtypes and shapes of the tensors + * whose value are encoded by `buffer`. + * @return A map from tensor name to tensor value, with the names corresponding + * to names in `specs`. + * @throws Error, if any of the tensors has unsupported dtype. 
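+ *
+ * Usage sketch (assumes `buffer` encodes a single float32 tensor of shape [2]):
+ *
+ * ```js
+ * const specs = [{name: 'w', dtype: 'float32', shape: [2]}];
+ * const namedTensors = decodeWeights(buffer, specs);
+ * namedTensors['w'].print(); // prints the decoded 2-element tensor
+ * ```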
+ */ +declare function decodeWeights(buffer: ArrayBuffer, specs: WeightsManifestEntry[]): NamedTensorMap; + +/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */ +export declare const defaults: Config; + +/** Face descriptor type as number array */ +export declare type Descriptor = Array; + +/** Calculates distance between two descriptors + * @param options - calculation options + * - order - algorithm to use + * Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2 + * - multiplier - by how much to enhance difference analysis in range of 1..100 + * default is 20 which normalizes results to similarity above 0.5 can be considered a match + */ +declare function distance(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number; + +declare namespace draw { + export { + gesture, + face, + body, + hand, + object, + person, + canvas, + all, + DrawOptions, + options + } +} + +/** Draw Options + * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter + */ +export declare type DrawOptions = { + /** draw line color */ + color: string; + /** label color */ + labelColor: string; + /** label shadow color */ + shadowColor: string; + /** label font */ + font: string; + /** line spacing between labels */ + lineHeight: number; + /** line width for drawn lines */ + lineWidth: number; + /** size of drawn points */ + pointSize: number; + /** draw rounded boxes by n pixels */ + roundRect: number; + /** should points be drawn? */ + drawPoints: boolean; + /** should labels be drawn? */ + drawLabels: boolean; + /** should detected gestures be drawn? */ + drawGestures: boolean; + /** should draw boxes around detection results? */ + drawBoxes: boolean; + /** should draw polygons from detection points? */ + drawPolygons: boolean; + /** should draw gaze arrows? */ + drawGaze: boolean; + /** should fill polygons? */ + fillPolygons: boolean; + /** use z-coordinate when available */ + useDepth: boolean; + /** should lines be curved? */ + useCurves: boolean; +}; + +export declare type Emotion = 'angry' | 'disgust' | 'fear' | 'happy' | 'sad' | 'surprise' | 'neutral'; + +/** + * Encode a map from names to weight values as an ArrayBuffer, along with an + * `Array` of `WeightsManifestEntry` as specification of the encoded weights. + * + * This function does not perform sharding. + * + * This function is the reverse of `decodeWeights`. + * + * @param tensors A map ("dict") from names to tensors. + * @param group Group to which the weights belong (optional). + * @returns A `Promise` of + * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s + * concatenated. + * - An `Array` of `WeightManifestEntry`s, carrying information including + * tensor names, `dtype`s and shapes. + * @throws Error: on unsupported tensor `dtype`. 
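+ *
+ * Usage sketch (hypothetical tensor map):
+ *
+ * ```js
+ * const {data, specs} = await encodeWeights({w: tf.tensor1d([1, 2])});
+ * // data: 8-byte ArrayBuffer; specs: [{name: 'w', dtype: 'float32', shape: [2]}]
+ * ```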
+ */ +declare function encodeWeights(tensors: NamedTensorMap | NamedTensor[], group?: WeightGroup): Promise<{ + data: ArrayBuffer; + specs: WeightsManifestEntry[]; +}>; + +/** Env class that holds detected capabilities */ +export declare class Env { + /** Running in Browser */ + browser: boolean; + /** Running in NodeJS */ + node: boolean; + /** Running in WebWorker thread */ + worker: boolean; + /** Detected platform */ + platform: string; + /** Detected agent */ + agent: string; + /** List of supported backends */ + backends: string[]; + /** Has any work been performed so far */ + initial: boolean; + /** Are image filters supported? */ + filter: boolean | undefined; + /** TFJS instance details */ + tfjs: { + version: undefined | string; + }; + /** Is offscreenCanvas supported? */ + offscreen: undefined | boolean; + /** Are performance counter instant values or additive */ + perfadd: boolean; + /** WASM detected capabilities */ + wasm: { + supported: undefined | boolean; + backend: undefined | boolean; + simd: undefined | boolean; + multithread: undefined | boolean; + }; + /** WebGL detected capabilities */ + webgl: { + supported: undefined | boolean; + backend: undefined | boolean; + version: undefined | string; + renderer: undefined | string; + }; + /** WebGPU detected capabilities */ + webgpu: { + supported: undefined | boolean; + backend: undefined | boolean; + adapter: undefined | string; + }; + /** CPU info */ + cpu: { + model: undefined | string; + flags: string[]; + }; + /** List of supported kernels for current backend */ + kernels: string[]; + /** MonkeyPatch for Canvas */ + Canvas: undefined; + /** MonkeyPatch for Image */ + Image: undefined; + /** MonkeyPatch for ImageData */ + ImageData: undefined; + constructor(); + /** update backend information */ + updateBackend(): Promise; + /** update cpu information */ + updateCPU(): Promise; +} + +export declare const env: Env; + +/** Events dispatched by `human.events` + * - `create`: triggered when Human object is instantiated + * - `load`: triggered when models are loaded (explicitly or on-demand) + * - `image`: triggered when input image is processed + * - `result`: triggered when detection is complete + * - `warmup`: triggered when warmup is complete + */ +export declare type Events = 'create' | 'load' | 'image' | 'result' | 'warmup' | 'error'; + +/** Defines possible externally defined canvas */ +export declare type ExternalCanvas = typeof env.Canvas; + +/** draw detected faces */ +declare function face(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial): Promise; + +/** Anti-spoofing part of face configuration */ +export declare interface FaceAntiSpoofConfig extends GenericConfig { +} + +/** Configures all face-specific options: face detection, mesh analysis, age, gender, emotion detection and face description */ +export declare interface FaceConfig extends GenericConfig { + detector: Partial; + mesh: Partial; + iris: Partial; + description: Partial; + emotion: Partial; + antispoof: Partial; + liveness: Partial; +} + +/** Description or face embedding part of face configuration + * - also used by age and gender detection + */ +export declare interface FaceDescriptionConfig extends GenericConfig { + /** minimum confidence for a detected face before results are discarded */ + minConfidence: number; +} + +/** Detector part of face configuration */ +export declare interface FaceDetectorConfig extends GenericConfig { + /** is face rotation correction performed after detecting face? 
+     * used to correctly analyze faces under high angles
+     */
+    rotation: boolean;
+    /** maximum number of detected faces */
+    maxDetected: number;
+    /** minimum confidence for a detected face before results are discarded */
+    minConfidence: number;
+    /** minimum overlap between two detected faces before one is discarded */
+    iouThreshold: number;
+    /** should child models perform on masked image of a face */
+    mask: boolean;
+    /** should face detection return processed and cropped face tensor that can be used with an external model for additional processing?
+     * if enabled it must be manually deallocated to avoid memory leak */
+    return: boolean;
+}
+
+/** Emotion part of face configuration */
+export declare interface FaceEmotionConfig extends GenericConfig {
+    /** minimum confidence for a detected face before results are discarded */
+    minConfidence: number;
+}
+
+/** face gesture type */
+export declare type FaceGesture = `facing ${'left' | 'center' | 'right'}` | `blink ${'left' | 'right'} eye` | `mouth ${number}% open` | `head ${'up' | 'down'}`;
+
+/** Iris part of face configuration */
+export declare interface FaceIrisConfig extends GenericConfig {
+}
+
+export declare type FaceLandmark = 'leftEye' | 'rightEye' | 'nose' | 'mouth' | 'leftEar' | 'rightEar' | 'symmetryLine' | 'silhouette' | 'lipsUpperOuter' | 'lipsLowerOuter' | 'lipsUpperInner' | 'lipsLowerInner' | 'rightEyeUpper0' | 'rightEyeLower0' | 'rightEyeUpper1' | 'rightEyeLower1' | 'rightEyeUpper2' | 'rightEyeLower2' | 'rightEyeLower3' | 'rightEyebrowUpper' | 'rightEyebrowLower' | 'rightEyeIris' | 'leftEyeUpper0' | 'leftEyeLower0' | 'leftEyeUpper1' | 'leftEyeLower1' | 'leftEyeUpper2' | 'leftEyeLower2' | 'leftEyeLower3' | 'leftEyebrowUpper' | 'leftEyebrowLower' | 'leftEyeIris' | 'midwayBetweenEyes' | 'noseTip' | 'noseBottom' | 'noseRightCorner' | 'noseLeftCorner' | 'rightCheek' | 'leftCheek';
+
+/** Liveness part of face configuration */
+export declare interface FaceLivenessConfig extends GenericConfig {
+}
+
+/** Mesh part of face configuration */
+export declare interface FaceMeshConfig extends GenericConfig {
+}
+
+/** Face results
+ * - Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models
+ * - Some values may be null if specific model is not enabled
+ */
+export declare interface FaceResult {
+    /** face id */
+    id: number;
+    /** overall face score */
+    score: number;
+    /** detection score */
+    boxScore: number;
+    /** mesh score */
+    faceScore: number;
+    /** detected face box */
+    box: Box;
+    /** detected face box normalized to 0..1 */
+    boxRaw: Box;
+    /** detected face mesh */
+    mesh: Array<Point>;
+    /** detected face mesh normalized to 0..1 */
+    meshRaw: Array<Point>;
+    /** mesh keypoints combined into annotated results */
+    annotations: Record<FaceLandmark, Point[]>;
+    /** detected age */
+    age?: number;
+    /** detected gender */
+    gender?: Gender;
+    /** gender detection score */
+    genderScore?: number;
+    /** detected emotions */
+    emotion?: Array<{
+        score: number;
+        emotion: Emotion;
+    }>;
+    /** detected race */
+    race?: Array<{
+        score: number;
+        race: Race;
+    }>;
+    /** face descriptor */
+    embedding?: Array<number>;
+    /** face iris distance from camera */
+    iris?: number;
+    /** face anti-spoofing result confidence */
+    real?: number;
+    /** face liveness result confidence */
+    live?: number;
+    /** face rotation details */
+    rotation?: {
+        angle: {
+            roll: number;
+            yaw: number;
+            pitch: number;
+        };
+        matrix: [number, number, number, number, number, number, number, number, number];
+        gaze: {
+            bearing: number;
+            strength: number;
+        };
} | null; + /** detected face as tensor that can be used in further pipelines */ + tensor?: Tensor; +} + +/** Run input through image filters before inference + * - available only in Browser environments + * - image filters run with near-zero latency as they are executed on the GPU using WebGL + */ +export declare interface FilterConfig { + /** are image filters enabled? */ + enabled: boolean; + /** perform image histogram equalization + * - equalization is performed on input as a whole and detected face before its passed for further analysis + */ + equalization: boolean; + /** resize input width + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + width: number; + /** resize input height + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + height: number; + /** return processed canvas imagedata in result */ + return: boolean; + /** flip input as mirror image */ + flip: boolean; + /** range: -1 (darken) to 1 (lighten) */ + brightness: number; + /** range: -1 (reduce contrast) to 1 (increase contrast) */ + contrast: number; + /** range: 0 (no sharpening) to 1 (maximum sharpening) */ + sharpness: number; + /** range: 0 (no blur) to N (blur radius in pixels) */ + blur: number; + /** range: -1 (reduce saturation) to 1 (increase saturation) */ + saturation: number; + /** range: 0 (no change) to 360 (hue rotation in degrees) */ + hue: number; + /** image negative */ + negative: boolean; + /** image sepia colors */ + sepia: boolean; + /** image vintage colors */ + vintage: boolean; + /** image kodachrome colors */ + kodachrome: boolean; + /** image technicolor colors */ + technicolor: boolean; + /** image polaroid camera effect */ + polaroid: boolean; + /** range: 0 (no pixelate) to N (number of pixels to pixelate) */ + pixelate: number; +} + +export declare type Finger = 'index' | 'middle' | 'pinky' | 'ring' | 'thumb' | 'palm'; + +export declare type FingerCurl = 'none' | 'half' | 'full'; + +export declare type FingerDirection = 'verticalUp' | 'verticalDown' | 'horizontalLeft' | 'horizontalRight' | 'diagonalUpRight' | 'diagonalUpLeft' | 'diagonalDownRight' | 'diagonalDownLeft'; + +/** + * Creates an IOHandler that loads model artifacts from memory. + * + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * const model = await tf.loadLayersModel(tf.io.fromMemory( + * modelTopology, weightSpecs, weightData)); + * ``` + * + * @param modelArtifacts a object containing model topology (i.e., parsed from + * the JSON format). + * @param weightSpecs An array of `WeightsManifestEntry` objects describing the + * names, shapes, types, and quantization of the weight data. + * @param weightData A single `ArrayBuffer` containing the weight data, + * concatenated in the order described by the weightSpecs. + * @param trainingConfig Model training configuration. Optional. + * + * @returns A passthrough `IOHandler` that simply loads the provided data. 
+ */ +declare function fromMemory(modelArtifacts: {} | ModelArtifacts, weightSpecs?: WeightsManifestEntry[], weightData?: ArrayBuffer, trainingConfig?: TrainingConfig): IOHandler; + +export declare type Gender = 'male' | 'female' | 'unknown'; + +/** Generic config type inherited by all module types */ +export declare interface GenericConfig { + /** is module enabled? */ + enabled: boolean; + /** path to model json file (relative to `modelBasePath` */ + modelPath: string; + /** how many max frames to go without re-running model if cached results are acceptable + * for two-phase models such as face and hand caching applies to bounding boxes detection only */ + skipFrames: number; + /** how many max milliseconds to go without re-running model if cached results are acceptable + * for two-phase models such as face and hand caching applies to bounding boxes detection only */ + skipTime: number; +} + +/** draw detected gestures */ +declare function gesture(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial): Promise; + +/** Controlls gesture detection */ +export declare interface GestureConfig { + /** is gesture detection enabled? */ + enabled: boolean; +} + +/** Gesture combined results + * Each result has: + * - part: part name and number where gesture was detected: `face`, `iris`, `body`, `hand` + * - gesture: gesture detected + */ +export declare type GestureResult = { + 'face': number; + gesture: FaceGesture; +} | { + 'iris': number; + gesture: IrisGesture; +} | { + 'body': number; + gesture: BodyGesture; +} | { + 'hand': number; + gesture: HandGesture; +}; + +declare const getLoadHandlers: (url: string | string[], loadOptions?: LoadOptions) => IOHandler[]; + +/** + * Create `ModelArtifacts` from a JSON file. + * + * @param modelJSON Object containing the parsed JSON of `model.json` + * @param loadWeights Function that takes the JSON file's weights manifest, + * reads weights from the listed path(s), and returns a Promise of the + * weight manifest entries along with the weights data. + * @returns A Promise of the `ModelArtifacts`, as described by the JSON file. + */ +declare function getModelArtifactsForJSON(modelJSON: ModelJSON, loadWeights: (weightsManifest: WeightsManifestConfig) => Promise<[WeightsManifestEntry[], /* weightData */ ArrayBuffer]>): Promise; + +/** + * Populate ModelArtifactsInfo fields for a model with JSON topology. + * @param modelArtifacts + * @returns A ModelArtifactsInfo object. + */ +declare function getModelArtifactsInfoForJSON(modelArtifacts: ModelArtifacts): ModelArtifactsInfo; + +declare const getSaveHandlers: (url: string | string[]) => IOHandler[]; + +declare interface GPUData { + tensorRef: Tensor; + texture?: WebGLTexture; + texShape?: [number, number]; +} + +/** + * A `tf.GraphModel` is a directed, acyclic graph built from a + * SavedModel GraphDef and allows inference execution. + * + * A `tf.GraphModel` can only be created by loading from a model converted from + * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using + * the command line converter tool and loaded via `tf.loadGraphModel`. 
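+ *
+ * Typical usage sketch (model URL and input shape are hypothetical):
+ *
+ * ```js
+ * const model = await tf.loadGraphModel('https://example.com/model.json');
+ * const prediction = model.predict(tf.zeros([1, 224, 224, 3]));
+ * ```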
+ * + * @doc {heading: 'Models', subheading: 'Classes'} + */ +export declare class GraphModel implements InferenceModel { + private modelUrl; + private loadOptions; + private executor; + private version; + private handler; + private artifacts; + private initializer; + private resourceManager; + private signature; + readonly modelVersion: string; + readonly inputNodes: string[]; + readonly outputNodes: string[]; + readonly inputs: TensorInfo[]; + readonly outputs: TensorInfo[]; + readonly weights: NamedTensorsMap; + readonly metadata: {}; + readonly modelSignature: {}; + /** + * @param modelUrl url for the model, or an `io.IOHandler`. + * @param weightManifestUrl url for the weight file generated by + * scripts/convert.py script. + * @param requestOption options for Request, which allows to send credentials + * and custom headers. + * @param onProgress Optional, progress callback function, fired periodically + * before the load is completed. + */ + constructor(modelUrl: string | io.IOHandler, loadOptions?: io.LoadOptions); + private findIOHandler; + /** + * Loads the model and weight files, construct the in memory weight map and + * compile the inference graph. + */ + load(): Promise; + /** + * Synchronously construct the in memory weight map and + * compile the inference graph. Also initialize hashtable if any. + * + * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true} + */ + loadSync(artifacts: io.ModelArtifacts): boolean; + /** + * Save the configuration and/or weights of the GraphModel. + * + * An `IOHandler` is an object that has a `save` method of the proper + * signature defined. The `save` method manages the storing or + * transmission of serialized data ("artifacts") that represent the + * model's topology and weights onto or via a specific medium, such as + * file downloads, local storage, IndexedDB in the web browser and HTTP + * requests to a server. TensorFlow.js provides `IOHandler` + * implementations for a number of frequently used saving mediums, such as + * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. See `tf.io` + * for more details. + * + * This method also allows you to refer to certain types of `IOHandler`s + * as URL-like string shortcuts, such as 'localstorage://' and + * 'indexeddb://'. + * + * Example 1: Save `model`'s topology and weights to browser [local + * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage); + * then load it back. + * + * ```js + * const modelUrl = + * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json'; + * const model = await tf.loadGraphModel(modelUrl); + * const zeros = tf.zeros([1, 224, 224, 3]); + * model.predict(zeros).print(); + * + * const saveResults = await model.save('localstorage://my-model-1'); + * + * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1'); + * console.log('Prediction from loaded model:'); + * model.predict(zeros).print(); + * ``` + * + * @param handlerOrURL An instance of `IOHandler` or a URL-like, + * scheme-based string shortcut for `IOHandler`. + * @param config Options for saving the model. + * @returns A `Promise` of `SaveResult`, which summarizes the result of + * the saving, such as byte sizes of the saved artifacts for the model's + * topology and weight values. + * + * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true} + */ + save(handlerOrURL: io.IOHandler | string, config?: io.SaveConfig): Promise; + /** + * Execute the inference for the input tensors. 
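+     *
+     * Minimal sketch (assumes a single-input image model):
+     * ```js
+     * const out = model.predict(tf.zeros([1, 224, 224, 3]));
+     * ```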
+ * + * @param input The input tensors, when there is single input for the model, + * inputs param should be a `tf.Tensor`. For models with mutliple inputs, + * inputs params should be in either `tf.Tensor`[] if the input order is + * fixed, or otherwise NamedTensorMap format. + * + * For model with multiple inputs, we recommend you use NamedTensorMap as the + * input type, if you use `tf.Tensor`[], the order of the array needs to + * follow the + * order of inputNodes array. @see {@link GraphModel.inputNodes} + * + * You can also feed any intermediate nodes using the NamedTensorMap as the + * input type. For example, given the graph + * InputNode => Intermediate => OutputNode, + * you can execute the subgraph Intermediate => OutputNode by calling + * model.execute('IntermediateNode' : tf.tensor(...)); + * + * This is useful for models that uses tf.dynamic_rnn, where the intermediate + * state needs to be fed manually. + * + * For batch inference execution, the tensors for each input need to be + * concatenated together. For example with mobilenet, the required input shape + * is [1, 244, 244, 3], which represents the [batch, height, width, channel]. + * If we are provide a batched data of 100 images, the input tensor should be + * in the shape of [100, 244, 244, 3]. + * + * @param config Prediction configuration for specifying the batch size and + * output node names. Currently the batch size option is ignored for graph + * model. + * + * @returns Inference result tensors. The output would be single `tf.Tensor` + * if model has single output node, otherwise Tensor[] or NamedTensorMap[] + * will be returned for model with multiple outputs. + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + predict(inputs: Tensor | Tensor[] | NamedTensorMap, config?: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap; + private normalizeInputs; + private normalizeOutputs; + /** + * Executes inference for the model for given input tensors. + * @param inputs tensor, tensor array or tensor map of the inputs for the + * model, keyed by the input node names. + * @param outputs output node name from the Tensorflow model, if no + * outputs are specified, the default outputs of the model would be used. + * You can inspect intermediate nodes of the model by adding them to the + * outputs array. + * + * @returns A single tensor if provided with a single output or no outputs + * are provided and there is only one default output, otherwise return a + * tensor array. The order of the tensor array is the same as the outputs + * if provided, otherwise the order of outputNodes attribute of the model. + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Tensor | Tensor[]; + /** + * Executes inference for the model for given input tensors in async + * fashion, use this method when your model contains control flow ops. + * @param inputs tensor, tensor array or tensor map of the inputs for the + * model, keyed by the input node names. + * @param outputs output node name from the Tensorflow model, if no outputs + * are specified, the default outputs of the model would be used. You can + * inspect intermediate nodes of the model by adding them to the outputs + * array. + * + * @returns A Promise of single tensor if provided with a single output or + * no outputs are provided and there is only one default output, otherwise + * return a tensor map. 
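+     *
+     * Illustrative sketch (the output node name is hypothetical):
+     * ```js
+     * const out = await model.executeAsync(inputTensor, ['Identity']);
+     * ```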
+     *
+     * @doc {heading: 'Models', subheading: 'Classes'}
+     */
+    executeAsync(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Promise<Tensor | Tensor[]>;
+    /**
+     * Get intermediate tensors for model debugging mode (flag
+     * KEEP_INTERMEDIATE_TENSORS is true).
+     *
+     * @doc {heading: 'Models', subheading: 'Classes'}
+     */
+    getIntermediateTensors(): NamedTensorsMap;
+    /**
+     * Dispose intermediate tensors for model debugging mode (flag
+     * KEEP_INTERMEDIATE_TENSORS is true).
+     *
+     * @doc {heading: 'Models', subheading: 'Classes'}
+     */
+    disposeIntermediateTensors(): void;
+    private convertTensorMapToTensorsMap;
+    /**
+     * Releases the memory used by the weight tensors and resourceManager.
+     *
+     * @doc {heading: 'Models', subheading: 'Classes'}
+     */
+    dispose(): void;
+}
+
+/** draw detected hands */
+declare function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Configures all hand detection specific options */
+export declare interface HandConfig extends GenericConfig {
+    /** should hand rotation correction be performed after hand detection? */
+    rotation: boolean;
+    /** minimum confidence for a detected hand before results are discarded */
+    minConfidence: number;
+    /** minimum overlap between two detected hands before one is discarded */
+    iouThreshold: number;
+    /** maximum number of detected hands */
+    maxDetected: number;
+    /** should hand landmarks be detected or just return detected hand box */
+    landmarks: boolean;
+    detector: {
+        /** path to hand detector model json */
+        modelPath?: string;
+    };
+    skeleton: {
+        /** path to hand skeleton model json */
+        modelPath?: string;
+    };
+}
+
+/** hand gesture type */
+export declare type HandGesture = `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} forward` | `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} up` | 'victory' | 'thumbs up';
+
+/** Hand results */
+export declare interface HandResult {
+    /** hand id */
+    id: number;
+    /** hand overall score */
+    score: number;
+    /** hand detection score */
+    boxScore: number;
+    /** hand skeleton score */
+    fingerScore: number;
+    /** detected hand box */
+    box: Box;
+    /** detected hand box normalized to 0..1 */
+    boxRaw: Box;
+    /** detected hand keypoints */
+    keypoints: Array<Point>;
+    /** detected hand class */
+    label: HandType;
+    /** detected hand keypoints combined into annotated parts */
+    annotations: Record<Finger, Array<Point>>;
+    /** detected hand parts annotated with part gestures */
+    landmarks: Record<Finger, {
+        curl: FingerCurl;
+        direction: FingerDirection;
+    }>;
+}
+
+export declare type HandType = 'hand' | 'fist' | 'pinch' | 'point' | 'face' | 'tip' | 'pinchtip';
+
+/**
+ * Creates an IOHandler subtype that sends model artifacts to HTTP server.
+ *
+ * An HTTP request of the `multipart/form-data` mime type will be sent to the
+ * `path` URL. The form data includes artifacts that represent the topology
+ * and/or weights of the model. In the case of Keras-style `tf.Model`, two
+ * blobs (files) exist in form-data:
+ *   - A JSON file consisting of `modelTopology` and `weightsManifest`.
+ *   - A binary weights file consisting of the concatenated weight values.
+ * These files are in the same format as the one generated by
+ * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html).
+ * + * The following code snippet exemplifies the client-side code that uses this + * function: + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save(tf.io.http( + * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}})); + * console.log(saveResult); + * ``` + * + * If the default `POST` method is to be used, without any custom parameters + * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`: + * + * ```js + * const saveResult = await model.save('http://model-server:5000/upload'); + * ``` + * + * The following GitHub Gist + * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864 + * implements a server based on [flask](https://github.com/pallets/flask) that + * can receive the request. Upon receiving the model artifacts via the requst, + * this particular server reconsistutes instances of [Keras + * Models](https://keras.io/models/model/) in memory. + * + * + * @param path A URL path to the model. + * Can be an absolute HTTP path (e.g., + * 'http://localhost:8000/model-upload)') or a relative path (e.g., + * './model-upload'). + * @param requestInit Request configurations to be used when sending + * HTTP request to server using `fetch`. It can contain fields such as + * `method`, `credentials`, `headers`, `mode`, etc. See + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request + * for more information. `requestInit` must not have a body, because the + * body will be set by TensorFlow.js. File blobs representing the model + * topology (filename: 'model.json') and the weights of the model (filename: + * 'model.weights.bin') will be appended to the body. If `requestInit` has a + * `body`, an Error will be thrown. + * @param loadOptions Optional configuration for the loading. It includes the + * following fields: + * - weightPathPrefix Optional, this specifies the path prefix for weight + * files, by default this is calculated from the path param. + * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js, + * the `fetch` from node-fetch can be used here. + * - onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns An instance of `IOHandler`. 
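+ *
+ * The same handler type can also be used for loading (sketch, hypothetical URL):
+ *
+ * ```js
+ * const model = await tf.loadGraphModel(tf.io.http('http://model-server:5000/model.json'));
+ * ```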
+ *
+ * @doc {
+ *   heading: 'Models',
+ *   subheading: 'Loading',
+ *   namespace: 'io',
+ *   ignoreCI: true
+ * }
+ */
+declare function http(path: string, loadOptions?: LoadOptions): IOHandler;
+
+/** **Human** library main class
+ *
+ * All methods and properties are available only as members of Human class
+ *
+ * - Configuration object definition: {@link Config}
+ * - Results object definition: {@link Result}
+ * - Possible inputs: {@link Input}
+ *
+ * @param userConfig - {@link Config}
+ * @returns instance of {@link Human}
+ */
+declare class Human {
+    #private;
+    /** Current version of Human library in *semver* format */
+    version: string;
+    /** Current configuration
+     * - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)
+     */
+    config: Config;
+    /** Last known result of detect run
+     * - Can be accessed anytime after initial detection
+     */
+    result: Result;
+    /** Current state of Human library
+     * - Can be polled to determine operations that are currently executed
+     * - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle'
+     */
+    state: string;
+    /** currently processed image tensor and canvas */
+    process: {
+        tensor: Tensor | null;
+        canvas: AnyCanvas | null;
+    };
+    /** Instance of TensorFlow/JS used by Human
+     * - Can be embedded or externally provided
+     * [TFJS API]: {@link https://js.tensorflow.org/api/latest/}
+     */
+    tf: any;
+    /** Object containing environment information used for diagnostics */
+    env: Env;
+    /** Draw helper classes that can draw detected objects on canvas using specified draw options
+     * - canvas: draws input to canvas
+     * - options: are global settings for all draw operations, can be overridden for each draw method {@link DrawOptions}
+     * - face, body, hand, gesture, object, person: draws detected results as overlays on canvas
+     */
+    draw: {
+        canvas: typeof draw.canvas;
+        face: typeof draw.face;
+        body: typeof draw.body;
+        hand: typeof draw.hand;
+        gesture: typeof draw.gesture;
+        object: typeof draw.object;
+        person: typeof draw.person;
+        all: typeof draw.all;
+        options: DrawOptions;
+    };
+    /** Currently loaded models
+     * @internal
+     * {@link Models}
+     */
+    models: models.Models;
+    /** Container for events dispatched by Human
+     * Possible events:
+     * - `create`: triggered when Human object is instantiated
+     * - `load`: triggered when models are loaded (explicitly or on-demand)
+     * - `image`: triggered when input image is processed
+     * - `result`: triggered when detection is complete
+     * - `warmup`: triggered when warmup is complete
+     * - `error`: triggered on some errors
+     */
+    events: EventTarget | undefined;
+    /** Reference face triangulation array of 468 points, used for triangle references between points */
+    faceTriangulation: number[];
+    /** Reference UV map of 468 values, used for 3D mapping of the face mesh */
+    faceUVMap: [number, number][];
+    /** Performance object that contains values for all recently performed operations */
+    performance: Record<string, number>;
+    /** WebGL debug info */
+    gl: Record<string, unknown>;
+    /** Constructor for **Human** library that is further used for all operations
+     * @param userConfig - user configuration object {@link Config}
+     */
+    constructor(userConfig?: Partial<Config>);
+    /** internal function to measure tensor leaks */
+    analyze: (...msg: string[]) => void;
+    /** Reset configuration to default values */
+    reset(): void;
+    /** Validate current configuration schema */
+    validate(userConfig?: Partial<Config>): {
+        reason: string;
+        where: string;
+        expected?: string;
+    }[];
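+    /* Typical end-to-end usage (illustrative sketch; element ids and model path are hypothetical):
+     *
+     *   const human = new Human({ modelBasePath: '../models' });
+     *   const result = await human.detect(document.getElementById('video'));
+     *   await human.draw.all(document.getElementById('canvas'), result);
+     */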
+    /** Exports face matching methods {@link match#similarity} */
+    similarity: typeof match.similarity;
+    /** Exports face matching methods {@link match#distance} */
+    distance: typeof match.distance;
+    /** Exports face matching methods {@link match#match} */
+    match: typeof match.match;
+    /** Utility wrapper for performance.now() */
+    now(): number;
+    /** Process input and return both canvas and tensor
+     *
+     * @param input - any input {@link Input}
+     * @param getTensor - should image processing also return tensor or just canvas
+     * Returns object with `tensor` and `canvas`
+     */
+    image(input: Input, getTensor?: boolean): Promise<{
+        tensor: Tensor | null;
+        canvas: AnyCanvas | null;
+    }>;
+    /** Segmentation method takes any input and returns processed canvas with body segmentation
+     * - Segmentation is not triggered as part of detect process
+     * @param input - {@link Input}
+     * @param background - {@link Input}
+     * - Optional parameter background is used to fill the background with specific input
+     * Returns:
+     * - `data` as raw data array with per-pixel segmentation values
+     * - `canvas` as canvas which is input image filtered with segmentation data and optionally merged with background image. canvas alpha values are set to segmentation values for easy merging
+     * - `alpha` as grayscale canvas that represents segmentation alpha values
+     */
+    segmentation(input: Input, background?: Input): Promise<{
+        data: number[] | Tensor;
+        canvas: AnyCanvas | null;
+        alpha: AnyCanvas | null;
+    }>;
+    /** Enhance method performs additional enhancements to face image previously detected for further processing
+     *
+     * @param input - Tensor as provided in human.result.face[n].tensor
+     * @returns Tensor
+     */
+    enhance(input: Tensor): Tensor | null;
+    /** Compare two input tensors for pixel similarity
+     * - use `human.image` to process any valid input and get a tensor that can be used for compare
+     * - when passing manually generated tensors:
+     * - both input tensors must be in format [1, height, width, 3]
+     * - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor
+     * - return value is pixel similarity score normalized by input resolution and rgb channels
+     */
+    compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number>;
+    /** Explicit backend initialization
+     * - Normally done implicitly during initial load phase
+     * - Call to explicitly register and initialize TFJS backend without any other operations
+     * - Use when changing backend during runtime
+     */
+    init(): Promise<void>;
+    /** Load method preloads all configured models on-demand
+     * - Not explicitly required as any required model is loaded implicitly on its first run
+     *
+     * @param userConfig - {@link Config}
+     */
+    load(userConfig?: Partial<Config>): Promise<void>;
+    /** emit event */
+    emit: (event: string) => void;
+    /** Runs interpolation using last known result and returns smoothed result
+     * Interpolation is based on time since last known result so it can be called independently
+     *
+     * @param result - {@link Result} optional use specific result set to run interpolation on
+     * @returns result - {@link Result}
+     */
+    next(result?: Result): Result;
+    /** Warmup method pre-initializes all configured models for faster inference
+     * - can take significant time on startup
+     * - only used for `webgl` and `humangl` backends
+     * @param userConfig - {@link Config}
+     * @returns result - {@link Result}
+     */
+    warmup(userConfig?: Partial<Config>): Promise<Result>;
+    /** Run detect with tensorflow profiling
+     * - result object will contain total execution time information for top-20 kernels
+     * - actual detection object can be accessed via `human.result`
+     */
+    profile(input: Input, userConfig?: Partial<Config>): Promise<Record<string, number>>;
+    /** Main detection method
+     * - Analyze configuration: {@link Config}
+     * - Pre-process input: {@link Input}
+     * - Run inference for all configured models
+     * - Process and return result: {@link Result}
+     *
+     * @param input - {@link Input}
+     * @param userConfig - {@link Config}
+     * @returns result - {@link Result}
+     */
+    detect(input: Input, userConfig?: Partial<Config>): Promise<Result>;
+}
+export { Human }
+export default Human;
+
+/** Defines all possible image objects */
+export declare type ImageObjects = ImageData | ImageBitmap;
+
+/**
+ * Common interface for a machine learning model that can do inference.
+ */
+declare interface InferenceModel {
+    /**
+     * Return the array of input tensor info.
+     */
+    readonly inputs: ModelTensorInfo[];
+    /**
+     * Return the array of output tensor info.
+     */
+    readonly outputs: ModelTensorInfo[];
+    /**
+     * Execute the inference for the input tensors.
+     *
+     * @param input The input tensors, when there is single input for the model,
+     * inputs param should be a Tensor. For models with multiple inputs, inputs
+     * params should be in either Tensor[] if the input order is fixed, or
+     * otherwise NamedTensorMap format.
+     * For batch inference execution, the tensors for each input need to be
+     * concatenated together. For example with mobilenet, the required input shape
+     * is [1, 244, 244, 3], which represents the [batch, height, width, channel].
+     * If we provide batched data of 100 images, the input tensor should be
+     * in the shape of [100, 244, 244, 3].
+     *
+     * @param config Prediction configuration for specifying the batch size.
+     *
+     * @returns Inference result tensors. The output would be single Tensor if
+     * model has single output node, otherwise Tensor[] or NamedTensorMap[] will
+     * be returned for model with multiple outputs.
+     */
+    predict(inputs: Tensor | Tensor[] | NamedTensorMap, config: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap;
+    /**
+     * Execute the inference for the input tensors and return activation
+     * values for specified output node names without batching.
+     *
+     * @param input The input tensors, when there is single input for the model,
+     * inputs param should be a Tensor. For models with multiple inputs, inputs
+     * params should be in either Tensor[] if the input order is fixed, or
+     * otherwise NamedTensorMap format.
+     *
+     * @param outputs string|string[]. List of output node names to retrieve
+     * activation from.
+     *
+     * @returns Activation values for the output nodes result tensors. The return
+     * type matches specified parameter outputs type. The output would be single
+     * Tensor if single output is specified, otherwise Tensor[] for multiple
+     * outputs.
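+     *
+     * Sketch (the output node name is hypothetical):
+     * ```js
+     * const activation = model.execute(inputTensor, 'Conv2D_1');
+     * ```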
+ */ + execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs: string | string[]): Tensor | Tensor[]; +} + +/** Defines all possible input types for **Human** detection */ +export declare type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas; + +declare namespace io { + export { + copyModel, + listModels, + moveModel, + removeModel, + browserFiles, + browserHTTPRequest, + concatenateArrayBuffers, + decodeWeights, + encodeWeights, + fromMemory, + getLoadHandlers, + getModelArtifactsForJSON, + getModelArtifactsInfoForJSON, + getSaveHandlers, + http, + IOHandler, + isHTTPScheme, + LoadHandler, + LoadOptions, + loadWeights, + ModelArtifacts, + ModelArtifactsInfo, + ModelJSON, + ModelStoreManager, + OnProgressCallback, + registerLoadRouter, + registerSaveRouter, + RequestDetails, + SaveConfig, + SaveHandler, + SaveResult, + TrainingConfig, + WeightGroup, + weightsLoaderFactory, + WeightsManifestConfig, + WeightsManifestEntry, + withSaveHandler + } +} + +/** + * Interface for a model import/export handler. + * + * The `save` and `load` handlers are both optional, in order to allow handlers + * that support only saving or loading. + */ +declare interface IOHandler { + save?: SaveHandler; + load?: LoadHandler; +} + +declare type IORouter = (url: string | string[], loadOptions?: LoadOptions) => IOHandler; + +/** iris gesture type */ +export declare type IrisGesture = 'facing center' | `looking ${'left' | 'right' | 'up' | 'down'}` | 'looking center'; + +declare function isHTTPScheme(url: string): boolean; + +/** + * List all models stored in registered storage mediums. + * + * For a web browser environment, the registered mediums are Local Storage and + * IndexedDB. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @returns A `Promise` of a dictionary mapping URLs of existing models to + * their model artifacts info. URLs include medium-specific schemes, e.g., + * 'indexeddb://my/model/1'. Model artifacts info include type of the + * model's topology, byte sizes of the topology, weights, etc. + * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function listModels(): Promise<{ + [url: string]: ModelArtifactsInfo; +}>; + +/** Load method preloads all instance.configured models on-demand */ +declare function load(instance: Human): Promise; + +/** + * Type definition for handlers of loading operations. + */ +declare type LoadHandler = () => Promise; + +/** @innamespace io */ +declare interface LoadOptions { + /** + * RequestInit (options) for HTTP requests. + * + * For detailed information on the supported fields, see + * [https://developer.mozilla.org/en-US/docs/Web/API/Request/Request]( + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request) + */ + requestInit?: RequestInit; + /** + * Progress callback. + */ + onProgress?: OnProgressCallback; + /** + * A function used to override the `window.fetch` function. 
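+     *
+     * Sketch (assumes a NodeJS environment where `fetch` comes from node-fetch):
+     * ```js
+     * const model = await tf.loadGraphModel(url, {fetchFunc: fetch});
+     * ```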
+ */ + fetchFunc?: Function; + /** + * Strict loading model: whether extraneous weights or missing + * weights should trigger an `Error`. + * + * If `true`, require that the provided weights exactly match those + * required by the layers. `false` means that both extra weights + * and missing weights will be silently ignored. + * + * Default: `true`. + */ + strict?: boolean; + /** + * Path prefix for weight files, by default this is calculated from the + * path of the model JSON file. + * + * For instance, if the path to the model JSON file is + * `http://localhost/foo/model.json`, then the default path prefix will be + * `http://localhost/foo/`. If a weight file has the path value + * `group1-shard1of2` in the weight manifest, then the weight file will be + * loaded from `http://localhost/foo/group1-shard1of2` by default. However, + * if you provide a `weightPathPrefix` value of + * `http://localhost/foo/alt-weights`, then the weight file will be loaded + * from the path `http://localhost/foo/alt-weights/group1-shard1of2` instead. + */ + weightPathPrefix?: string; + /** + * Whether the module or model is to be loaded from TF Hub. + * + * Setting this to `true` allows passing a TF-Hub module URL, omitting the + * standard model file name and the query parameters. + * + * Default: `false`. + */ + fromTFHub?: boolean; + /** + * An async function to convert weight file name to URL. The weight file + * names are stored in model.json's weightsManifest.paths field. By default we + * consider weight files are colocated with the model.json file. For example: + * model.json URL: https://www.google.com/models/1/model.json + * group1-shard1of1.bin url: + * https://www.google.com/models/1/group1-shard1of1.bin + * + * With this func you can convert the weight file name to any URL. + */ + weightUrlConverter?: (weightFileName: string) => Promise; +} + +/** + * Reads a weights manifest JSON configuration, fetches the weights and + * returns them as `Tensor`s. + * + * @param manifest The weights manifest JSON. + * @param filePathPrefix The path prefix for filenames given in the manifest. + * Defaults to the empty string. + * @param weightNames The names of the weights to be fetched. + */ +declare function loadWeights(manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[], requestInit?: RequestInit): Promise; + +declare namespace match { + export { + distance, + similarity, + match_2 as match, + Descriptor, + MatchOptions + } +} + +/** Matches given descriptor to a closest entry in array of descriptors + * @param descriptor - face descriptor + * @param descriptors - array of face descriptors to commpare given descriptor to + * @param options - see {@link similarity} + * Returns + * - `index` index array index where best match was found or -1 if no matches + * - {@link distance} calculated `distance` of given descriptor to the best match + * - {@link similarity} calculated normalized `similarity` of given descriptor to the best match + */ +declare function match_2(descriptor: Descriptor, descriptors: Array, options?: MatchOptions): { + index: number; + distance: number; + similarity: number; +}; + +declare type MatchOptions = { + order?: number; + threshold?: number; + multiplier?: number; + min?: number; + max?: number; +} | undefined; + +/** + * The serialized artifacts of a model, including topology and weights. 
+ * + * The `modelTopology`, `trainingConfig`, `weightSpecs` and `weightData` fields + * of this interface are optional, in order to support topology- or weights-only + * saving and loading. + * + * Note this interface is used internally in IOHandlers. For the file format + * written to disk as `model.json`, see `ModelJSON`. + */ +declare interface ModelArtifacts { + /** + * Model topology. + * + * For Keras-style `tf.Model`s, this is a JSON object. + * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON + * encoding of the `GraphDef` protocol buffer. + */ + modelTopology?: {} | ArrayBuffer; + /** + * Serialized configuration for the model's training. + */ + trainingConfig?: TrainingConfig; + /** + * Weight specifications. + * + * This corresponds to the weightsData below. + */ + weightSpecs?: WeightsManifestEntry[]; + /** + * Binary buffer for all weight values concatenated in the order specified + * by `weightSpecs`. + */ + weightData?: ArrayBuffer; + /** + * Hard-coded format name for models saved from TensorFlow.js or converted + * by TensorFlow.js Converter. + */ + format?: string; + /** + * What library is responsible for originally generating this artifact. + * + * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'. + */ + generatedBy?: string; + /** + * What library or tool is responsible for converting the original model + * to this format, applicable only if the model is output by a converter. + * + * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'. + * + * A value of `null` means the model artifacts are generated without any + * conversion process (e.g., saved directly from a TensorFlow.js + * `tf.LayersModel` instance.) + */ + convertedBy?: string | null; + /** + * Inputs and outputs signature for saved model. + */ + signature?: {}; + /** + * User-defined metadata about the model. + */ + userDefinedMetadata?: { + [key: string]: {}; + }; + /** + * Initializer for the model. + */ + modelInitializer?: {}; +} + +declare interface ModelArtifactsInfo { + /** + * Timestamp for when the model is saved. + */ + dateSaved: Date; + /** + * TODO (cais,yassogba) consider removing GraphDef as GraphDefs now + * come in a JSON format and none of our IOHandlers support a non json + * format. We could conder replacing this with 'Binary' if we want to + * allow future handlers to save to non json formats (though they will + * probably want more information than 'Binary'). + * Type of the model topology + * + * Type of the model topology + * + * Possible values: + * - JSON: JSON config (human-readable, e.g., Keras JSON). + * - GraphDef: TensorFlow + * [GraphDef](https://www.tensorflow.org/extend/tool_developers/#graphdef) + * protocol buffer (binary). + */ + modelTopologyType: 'JSON' | 'GraphDef'; + /** + * Size of model topology (Keras JSON or GraphDef), in bytes. + */ + modelTopologyBytes?: number; + /** + * Size of weight specification or manifest, in bytes. + */ + weightSpecsBytes?: number; + /** + * Size of weight value data, in bytes. + */ + weightDataBytes?: number; +} + +/** + * The on-disk format of the `model.json` file. + * + * TF.js 1.0 always populates the optional fields when writing model.json. + * Prior versions did not provide those fields. + */ +declare interface ModelJSON { + /** + * Model topology. + * + * For Keras-style `tf.Model`s, this is a JSON object. + * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON + * encoding of the `GraphDef` protocol buffer. + */ + modelTopology: {}; + /** Model training configuration. 
 */
+ trainingConfig?: TrainingConfig;
+ /**
+ * Weights manifest.
+ *
+ * The weights manifest consists of an ordered list of weight-manifest
+ * groups. Each weight-manifest group consists of a number of weight values
+ * stored in a number of paths. See the documentation of
+ * `WeightsManifestConfig` for more details.
+ */
+ weightsManifest: WeightsManifestConfig;
+ /**
+ * Hard-coded format name for models saved from TensorFlow.js or converted
+ * by TensorFlow.js Converter.
+ */
+ format?: string;
+ /**
+ * What library is responsible for originally generating this artifact.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'.
+ */
+ generatedBy?: string;
+ /**
+ * What library or tool is responsible for converting the original model
+ * to this format, applicable only if the model is output by a converter.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'.
+ *
+ * A value of `null` means the model artifacts are generated without any
+ * conversion process (e.g., saved directly from a TensorFlow.js
+ * `tf.LayersModel` instance.)
+ */
+ convertedBy?: string | null;
+ /**
+ * Inputs and outputs signature for saved model.
+ */
+ signature?: {};
+ /**
+ * User-defined metadata about the model.
+ */
+ userDefinedMetadata?: {
+ [key: string]: {};
+ };
+ /**
+ * Initializer for the model.
+ */
+ modelInitializer?: {};
+}
+
+declare interface ModelPredictConfig {
+ /**
+ * Optional. Batch size (Integer). If unspecified, it will default to 32.
+ */
+ batchSize?: number;
+ /**
+ * Optional. Verbosity mode. Defaults to false.
+ */
+ verbose?: boolean;
+}
+
+/** Instances of all possible TFJS Graph Models used by Human
+ * - loaded as needed based on configuration
+ * - initialized explicitly with `human.load()` method
+ * - initialized implicitly on first call to `human.detect()`
+ * - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise<GraphModel>` if loading
+ */
+export declare class Models {
+ ssrnetage: null | GraphModel | Promise<GraphModel>;
+ gear: null | GraphModel | Promise<GraphModel>;
+ blazeposedetect: null | GraphModel | Promise<GraphModel>;
+ blazepose: null | GraphModel | Promise<GraphModel>;
+ centernet: null | GraphModel | Promise<GraphModel>;
+ efficientpose: null | GraphModel | Promise<GraphModel>;
+ mobilefacenet: null | GraphModel | Promise<GraphModel>;
+ emotion: null | GraphModel | Promise<GraphModel>;
+ facedetect: null | GraphModel | Promise<GraphModel>;
+ faceiris: null | GraphModel | Promise<GraphModel>;
+ facemesh: null | GraphModel | Promise<GraphModel>;
+ faceres: null | GraphModel | Promise<GraphModel>;
+ ssrnetgender: null | GraphModel | Promise<GraphModel>;
+ handpose: null | GraphModel | Promise<GraphModel>;
+ handskeleton: null | GraphModel | Promise<GraphModel>;
+ handtrack: null | GraphModel | Promise<GraphModel>;
+ liveness: null | GraphModel | Promise<GraphModel>;
+ movenet: null | GraphModel | Promise<GraphModel>;
+ nanodet: null | GraphModel | Promise<GraphModel>;
+ posenet: null | GraphModel | Promise<GraphModel>;
+ segmentation: null | GraphModel | Promise<GraphModel>;
+ antispoof: null | GraphModel | Promise<GraphModel>;
+}
+
+declare namespace models {
+ export {
+ reset,
+ load,
+ validate,
+ Models
+ }
+}
+
+/**
+ * An interface for the manager of a model store.
+ *
+ * A model store is defined as a storage medium on which multiple models can
+ * be stored. Each stored model has a unique `path` as its identifier.
+ * A `ModelStoreManager` for the store allows actions including
+ *
+ * - Listing the models stored in the store.
+ * - Deleting a model from the store.
+ */
+declare interface ModelStoreManager {
+ /**
+ * List all models in the model store.
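+ *
+ * A minimal usage sketch, assuming `manager` is an object implementing
+ * this interface:
+ *
+ * ```js
+ * const infos = await manager.listModels();
+ * for (const path of Object.keys(infos)) {
+ *   console.log(path, infos[path].modelTopologyType, infos[path].dateSaved);
+ * }
+ * ```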
+ *
+ * @returns A dictionary mapping paths of existing models to their
+ * model artifacts info. Model artifacts info includes type of the model's
+ * topology, byte sizes of the topology, weights, etc.
+ */
+ listModels(): Promise<{
+ [path: string]: ModelArtifactsInfo;
+ }>;
+ /**
+ * Remove a model specified by `path`.
+ *
+ * @param path
+ * @returns ModelArtifactsInfo of the deleted model (if and only if deletion
+ * is successful).
+ * @throws Error if deletion fails, e.g., if no model exists at `path`.
+ */
+ removeModel(path: string): Promise<ModelArtifactsInfo>;
+}
+
+/**
+ * Interface for model input/output tensor info.
+ */
+declare interface ModelTensorInfo {
+ name: string;
+ shape?: number[];
+ dtype: DataType;
+ tfDtype?: string;
+}
+
+/**
+ * Move a model from one URL to another.
+ *
+ * This function supports:
+ *
+ * 1. Moving within a storage medium, e.g.,
+ * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')`
+ * 2. Moving between two storage mediums, e.g.,
+ * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')`
+ *
+ * ```js
+ * // First create and save a model.
+ * const model = tf.sequential();
+ * model.add(tf.layers.dense(
+ * {units: 1, inputShape: [10], activation: 'sigmoid'}));
+ * await model.save('localstorage://demo/management/model1');
+ *
+ * // Then list existing models.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Move the model, from Local Storage to IndexedDB.
+ * await tf.io.moveModel(
+ * 'localstorage://demo/management/model1',
+ * 'indexeddb://demo/management/model1');
+ *
+ * // List models again.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Remove the moved model.
+ * await tf.io.removeModel('indexeddb://demo/management/model1');
+ * ```
+ *
+ * @param sourceURL Source URL of moving.
+ * @param destURL Destination URL of moving.
+ * @returns ModelArtifactsInfo of the copied model (if and only if copying
+ * is successful).
+ * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or
+ * if `sourceURL` and `destURL` are identical.
+ *
+ * @doc {
+ * heading: 'Models',
+ * subheading: 'Management',
+ * namespace: 'io',
+ * ignoreCI: true
+ * }
+ */
+declare function moveModel(sourceURL: string, destURL: string): Promise<ModelArtifactsInfo>;
+
+declare interface NamedTensor {
+ name: string;
+ tensor: Tensor;
+}
+
+/** @docalias {[name: string]: Tensor} */
+declare type NamedTensorMap = {
+ [name: string]: Tensor;
+};
+
+declare type NamedTensorsMap = {
+ [key: string]: Tensor[];
+};
+
+declare type NumericDataType = 'float32' | 'int32' | 'bool' | 'complex64';
+
+/** draw detected objects */
+declare function object(inCanvas: AnyCanvas, result: Array<ObjectResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Configures all object detection specific options */
+export declare interface ObjectConfig extends GenericConfig {
+ /** minimum confidence for detected objects before results are discarded */
+ minConfidence: number;
+ /** minimum overlap between two detected objects before one is discarded */
+ iouThreshold: number;
+ /** maximum number of detected objects */
+ maxDetected: number;
+}
+
+/** Object results */
+export declare interface ObjectResult {
+ /** object id */
+ id: number;
+ /** object detection score */
+ score: number;
+ /** detected object class id */
+ class: number;
+ /** detected object class name */
+ label: ObjectType;
+ /** detected object box */
+ box: Box;
+ /** detected object box normalized to 0..1 */
+ boxRaw: Box;
+}
+
+export declare type ObjectType = 'person' | 'bicycle' | 'car' | 'motorcycle' | 'airplane' | 'bus' | 'train' | 'truck' | 'boat' | 'traffic light' | 'fire hydrant' | 'stop sign' | 'parking meter' | 'bench' | 'bird' | 'cat' | 'dog' | 'horse' | 'sheep' | 'cow' | 'elephant' | 'bear' | 'zebra' | 'giraffe' | 'backpack' | 'umbrella' | 'handbag' | 'tie' | 'suitcase' | 'frisbee' | 'skis' | 'snowboard' | 'sports ball' | 'kite' | 'baseball bat' | 'baseball glove' | 'skateboard' | 'surfboard' | 'tennis racket' | 'bottle' | 'wine glass' | 'cup' | 'fork' | 'knife' | 'spoon' | 'bowl' | 'banana' | 'apple' | 'sandwich' | 'orange' | 'broccoli' | 'carrot' | 'hot dog' | 'pizza' | 'donut' | 'cake' | 'chair' | 'couch' | 'potted plant' | 'bed' | 'dining table' | 'toilet' | 'tv' | 'laptop' | 'mouse' | 'remote' | 'keyboard' | 'cell phone' | 'microwave' | 'oven' | 'toaster' | 'sink' | 'refrigerator' | 'book' | 'clock' | 'vase' | 'scissors' | 'teddy bear' | 'hair drier' | 'toothbrush';
+
+/**
+ * Callback for the progress of a long-running action such as an HTTP
+ * request for a large binary object.
+ *
+ * `fraction` should be a number in the [0, 1] interval, indicating how
+ * much of the action has completed.
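+ *
+ * For example, to log download progress while loading a model (assuming a
+ * valid `modelUrl`):
+ *
+ * ```js
+ * const model = await tf.loadGraphModel(modelUrl, {
+ *   onProgress: (fraction) => console.log(`loaded ${Math.round(100 * fraction)}%`),
+ * });
+ * ```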
+ */
+declare type OnProgressCallback = (fraction: number) => void;
+
+/** currently set draw options {@link DrawOptions} */
+declare const options: DrawOptions;
+
+/** draw combined person results instead of individual detection result objects */
+declare function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Person getter
+ * - Triggers combining all individual results into a virtual person object
+ */
+export declare interface PersonResult {
+ /** person id */
+ id: number;
+ /** face result that belongs to this person */
+ face: FaceResult;
+ /** body result that belongs to this person */
+ body: BodyResult | null;
+ /** left and right hand results that belong to this person */
+ hands: {
+ left: HandResult | null;
+ right: HandResult | null;
+ };
+ /** detected gestures specific to this person */
+ gestures: Array<GestureResult>;
+ /** box that defines the person */
+ box: Box;
+ /** box that defines the person normalized to 0..1 */
+ boxRaw?: Box;
+}
+
+/** generic point as [x, y, z?] */
+export declare type Point = [number, number, number?];
+
+export declare type Race = 'white' | 'black' | 'asian' | 'indian' | 'other';
+
+export declare enum Rank {
+ R0 = "R0",
+ R1 = "R1",
+ R2 = "R2",
+ R3 = "R3",
+ R4 = "R4",
+ R5 = "R5",
+ R6 = "R6"
+}
+
+declare interface RecursiveArray<T> {
+ [index: number]: T | RecursiveArray<T>;
+}
+
+declare const registerLoadRouter: (loadRouter: IORouter) => void;
+
+declare const registerSaveRouter: (saveRouter: IORouter) => void;
+
+/**
+ * Remove a model specified by URL from a registered storage medium.
+ *
+ * ```js
+ * // First create and save a model.
+ * const model = tf.sequential();
+ * model.add(tf.layers.dense(
+ * {units: 1, inputShape: [10], activation: 'sigmoid'}));
+ * await model.save('localstorage://demo/management/model1');
+ *
+ * // Then list existing models.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Delete the model.
+ * await tf.io.removeModel('localstorage://demo/management/model1');
+ *
+ * // List models again.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ * ```
+ *
+ * @param url A URL to a stored model, with a scheme prefix, e.g.,
+ * 'localstorage://my-model-1', 'indexeddb://my/model/2'.
+ * @returns ModelArtifactsInfo of the deleted model (if and only if deletion
+ * is successful).
+ * @throws Error if deletion fails, e.g., if no model exists at `url`.
+ *
+ * @doc {
+ * heading: 'Models',
+ * subheading: 'Management',
+ * namespace: 'io',
+ * ignoreCI: true
+ * }
+ */
+declare function removeModel(url: string): Promise<ModelArtifactsInfo>;
+
+/**
+ * Additional options for Platform.fetch
+ */
+declare interface RequestDetails {
+ /**
+ * Is this request for a binary file (as opposed to a json file)
+ */
+ isBinary?: boolean;
+}
+
+declare function reset(instance: Human): void;
+
+/**
+ * Result interface definition for **Human** library
+ *
+ * Contains all possible detection results
+ */
+export declare interface Result {
+ /** {@link FaceResult}: detection & analysis results */
+ face: Array<FaceResult>;
+ /** {@link BodyResult}: detection & analysis results */
+ body: Array<BodyResult>;
+ /** {@link HandResult}: detection & analysis results */
+ hand: Array<HandResult>;
+ /** {@link GestureResult}: detection & analysis results */
+ gesture: Array<GestureResult>;
+ /** {@link ObjectResult}: detection & analysis results */
+ object: Array<ObjectResult>;
+ /** global performance object with timing values for each operation */
+ performance: Record<string, number>;
+ /** optional processed canvas that can be used to draw input on screen */
+ canvas?: AnyCanvas | null;
+ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
+ readonly timestamp: number;
+ /** getter property that returns unified persons object */
+ persons: Array<PersonResult>;
+ /** Last known error message */
+ error: string | null;
+}
+
+/**
+ * Options for saving a model.
+ * @innamespace io
+ */
+declare interface SaveConfig {
+ /**
+ * Whether to save only the trainable weights of the model, ignoring the
+ * non-trainable ones.
+ */
+ trainableOnly?: boolean;
+ /**
+ * Whether the optimizer will be saved (if exists).
+ *
+ * Default: `false`.
+ */
+ includeOptimizer?: boolean;
+}
+
+/**
+ * Type definition for handlers of saving operations.
+ */
+declare type SaveHandler = (modelArtifact: ModelArtifacts) => Promise<SaveResult>;
+
+/**
+ * Result of a saving operation.
+ */
+declare interface SaveResult {
+ /**
+ * Information about the model artifacts saved.
+ */
+ modelArtifactsInfo: ModelArtifactsInfo;
+ /**
+ * HTTP responses from the server that handled the model-saving request (if
+ * any). This is applicable only to server-based saving routes.
+ */
+ responses?: Response[];
+ /**
+ * Error messages and related data (if any).
+ */
+ errors?: Array<{} | string>;
+}
+
+/** Configures the body segmentation module
+ * removes the background from an input containing a person
+ * if segmentation is enabled it will run as a preprocessing task before any other model
+ * alternatively leave it disabled and use it on-demand via the human.segmentation method, which can
+ * remove the background or replace it with a user-provided background
+ */
+export declare interface SegmentationConfig extends GenericConfig {
+ /** blur segmentation output by pixels for more realistic image */
+ blur: number;
+}
+
+/**
+ * @license
+ * Copyright 2017 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ * =============================================================================
+ */
+/// 
+/** @docalias number[] */
+declare interface ShapeMap {
+ R0: number[];
+ R1: [number];
+ R2: [number, number];
+ R3: [number, number, number];
+ R4: [number, number, number, number];
+ R5: [number, number, number, number, number];
+ R6: [number, number, number, number, number, number];
+}
+
+/** Calculates normalized similarity between two face descriptors based on their `distance`
+ * @param options - calculation options
+ * - order - algorithm to use
+ * Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
+ * - multiplier - by how much to enhance difference analysis in range of 1..100
+ * default is 20, which normalizes results so that similarity above 0.5 can be considered a match
+ * - min - normalize similarity result to a given range
+ * - max - normalize similarity result to a given range
+ * default is 0.2...0.8
+ * Returns similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity
+ */
+declare function similarity(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number;
+
+declare interface SingleValueMap {
+ bool: boolean;
+ int32: number;
+ float32: number;
+ complex64: number;
+ string: string;
+}
+
+export declare namespace Tensor { }
+
+/**
+ * A `tf.Tensor` object represents an immutable, multidimensional array of
+ * numbers that has a shape and a data type.
+ *
+ * For performance reasons, functions that create tensors do not necessarily
+ * perform a copy of the data passed to them (e.g. if the data is passed as a
+ * `Float32Array`), and changes to the data will change the tensor. This is not
+ * a feature and is not supported. To avoid this behavior, use the tensor before
+ * changing the input data or create a copy with `copy = tf.add(yourTensor, 0)`.
+ *
+ * See `tf.tensor` for details on how to create a `tf.Tensor`.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+export declare class Tensor<R extends Rank = Rank> {
+ /** Unique id of this tensor. */
+ readonly id: number;
+ /**
+ * Id of the bucket holding the data for this tensor. Multiple arrays can
+ * point to the same bucket (e.g. when calling array.reshape()).
+ */
+ dataId: DataId;
+ /** The shape of the tensor. */
+ readonly shape: ShapeMap[R];
+ /** Number of elements in the tensor. */
+ readonly size: number;
+ /** The data type for the array. */
+ readonly dtype: DataType;
+ /** The rank type for the array (see `Rank` enum). */
+ readonly rankType: R;
+ /** Whether this tensor has been globally kept. */
+ kept: boolean;
+ /** The id of the scope this tensor is being tracked in. */
+ scopeId: number;
+ /**
+ * Number of elements to skip in each dimension when indexing. See
+ * https://docs.scipy.org/doc/numpy/reference/generated/\
+ * numpy.ndarray.strides.html
+ */
+ readonly strides: number[];
+ constructor(shape: ShapeMap[R], dtype: DataType, dataId: DataId, id: number);
+ readonly rank: number;
+ /**
+ * Returns a promise of `tf.TensorBuffer` that holds the underlying data.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ buffer<D extends DataType = 'float32'>(): Promise<TensorBuffer<R, D>>;
+ /**
+ * Returns a `tf.TensorBuffer` that holds the underlying data.
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ bufferSync<D extends DataType = 'float32'>(): TensorBuffer<R, D>;
+ /**
+ * Returns the tensor data as a nested array. The transfer of data is done
+ * asynchronously.
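+ *
+ * For example, using standard `tf.tensor2d` creation:
+ *
+ * ```js
+ * const t = tf.tensor2d([[1, 2], [3, 4]]);
+ * const nested = await t.array(); // [[1, 2], [3, 4]]
+ * ```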
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ array(): Promise<ArrayMap[R]>;
+ /**
+ * Returns the tensor data as a nested array. The transfer of data is done
+ * synchronously.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ arraySync(): ArrayMap[R];
+ /**
+ * Asynchronously downloads the values from the `tf.Tensor`. Returns a
+ * promise of `TypedArray` that resolves when the computation has finished.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ data<D extends DataType = NumericDataType>(): Promise<DataTypeMap[D]>;
+ /**
+ * Copy the tensor's data to a new GPU resource. Compared to `dataSync()`
+ * and `data()`, this method prevents data from being downloaded to the CPU.
+ *
+ * For the WebGL backend, the data will be stored on a densely packed texture.
+ * This means that the texture will use the RGBA channels to store values.
+ *
+ * @param options:
+ * For WebGL,
+ * - customTexShape: Optional. If set, will use the user defined
+ * texture shape to create the texture.
+ *
+ * @returns For WebGL backend, a GPUData contains the new texture and
+ * its information.
+ * {
+ * tensorRef: The tensor that is associated with this texture,
+ * texture: WebGLTexture,
+ * texShape: [number, number] // [height, width]
+ * }
+ * Remember to dispose the GPUData after it is used by
+ * `res.tensorRef.dispose()`.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ dataToGPU(options?: DataToGPUOptions): GPUData;
+ /**
+ * Synchronously downloads the values from the `tf.Tensor`. This blocks the
+ * UI thread until the values are ready, which can cause performance issues.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ dataSync<D extends DataType = NumericDataType>(): DataTypeMap[D];
+ /** Returns the underlying bytes of the tensor's data. */
+ bytes(): Promise<Uint8Array[] | Uint8Array>;
+ /**
+ * Disposes `tf.Tensor` from memory.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ dispose(): void;
+ protected isDisposedInternal: boolean;
+ readonly isDisposed: boolean;
+ throwIfDisposed(): void;
+ /**
+ * Prints the `tf.Tensor`. See `tf.print` for details.
+ *
+ * @param verbose Whether to print verbose information about the tensor,
+ * including dtype and size.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ print(verbose?: boolean): void;
+ /**
+ * Returns a copy of the tensor. See `tf.clone` for details.
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ clone<T extends Tensor>(this: T): T;
+ /**
+ * Returns a human-readable description of the tensor. Useful for logging.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ toString(verbose?: boolean): string;
+ variable(trainable?: boolean, name?: string, dtype?: DataType): Variable<R>;
+}
+
+/**
+ * A mutable object, similar to `tf.Tensor`, that allows users to set values
+ * at locations before converting to an immutable `tf.Tensor`.
+ *
+ * See `tf.buffer` for creating a tensor buffer.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+declare class TensorBuffer<R extends Rank, D extends DataType = 'float32'> {
+ dtype: D;
+ size: number;
+ shape: ShapeMap[R];
+ strides: number[];
+ values: DataTypeMap[D];
+ constructor(shape: ShapeMap[R], dtype: D, values?: DataTypeMap[D]);
+ /**
+ * Sets a value in the buffer at a given location.
+ *
+ * @param value The value to set.
+ * @param locs The location indices.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Creation'}
+ */
+ set(value: SingleValueMap[D], ...locs: number[]): void;
+ /**
+ * Returns the value in the buffer at the provided location.
+ *
+ * @param locs The location indices.
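+ *
+ * A short set/get round trip, using the standard `tf.buffer` helper:
+ *
+ * ```js
+ * const buf = tf.buffer([2, 2]); // zero-filled float32 buffer
+ * buf.set(5, 0, 1); // value 5 at row 0, column 1
+ * console.log(buf.get(0, 1)); // 5
+ * buf.toTensor().print(); // freeze into an immutable tf.Tensor
+ * ```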
+ *
+ * @doc {heading: 'Tensors', subheading: 'Creation'}
+ */
+ get(...locs: number[]): SingleValueMap[D];
+ locToIndex(locs: number[]): number;
+ indexToLoc(index: number): number[];
+ readonly rank: number;
+ /**
+ * Creates an immutable `tf.Tensor` object from the buffer.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Creation'}
+ */
+ toTensor(): Tensor<R>;
+}
+
+declare interface TensorInfo {
+ name: string;
+ shape?: number[];
+ dtype: DataType;
+}
+
+/** @docalias TypedArray|Array */
+export declare type TensorLike = TypedArray | number | boolean | string | RecursiveArray<number | number[] | TypedArray> | RecursiveArray<boolean> | RecursiveArray<string> | Uint8Array[];
+
+/** Model training configuration. */
+declare interface TrainingConfig {
+ /** Optimizer used for the model training. */
+ optimizer_config: {};
+ /** Loss function(s) for the model's output(s). */
+ loss: string | string[] | {
+ [key: string]: string;
+ };
+ /** Metric function(s) for the model's output(s). */
+ metrics?: string[] | {
+ [key: string]: string;
+ };
+ weighted_metrics?: string[];
+ sample_weight_mode?: string;
+ loss_weights?: number[] | {
+ [key: string]: number;
+ };
+}
+
+declare type TypedArray = Float32Array | Int32Array | Uint8Array;
+
+declare function validate(instance: Human): Promise<void>;
+
+/**
+ * A mutable `tf.Tensor`, useful for persisting state, e.g. for training.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+declare class Variable<R extends Rank = Rank> extends Tensor<R> {
+ trainable: boolean;
+ name: string;
+ constructor(initialValue: Tensor<R>, trainable: boolean, name: string, tensorId: number);
+ /**
+ * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have
+ * the same shape and dtype as the old `tf.Tensor`.
+ *
+ * @param newValue New tensor to be assigned to this variable.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ assign(newValue: Tensor<R>): void;
+ dispose(): void;
+}
+
+/** Possible values for `human.warmup` */
+export declare type WarmupType = ['' | 'none' | 'face' | 'full' | 'body'];
+
+/**
+ * Group to which the weight belongs.
+ *
+ * - 'optimizer': Weight from a stateful optimizer.
+ */
+declare type WeightGroup = 'model' | 'optimizer';
+
+/**
+ * Creates a function, which reads a weights manifest JSON configuration,
+ * fetches the weight files using the specified function and returns them as
+ * `Tensor`s.
+ *
+ * ```js
+ * // example for creating a nodejs weight loader, which reads the weight files
+ * // from disk using fs.readFileSync
+ *
+ * import * as fs from 'fs'
+ *
+ * const fetchWeightsFromDisk = (filePaths: string[]) =>
+ * filePaths.map(filePath => fs.readFileSync(filePath).buffer)
+ *
+ * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)
+ *
+ * const manifest = JSON.parse(
+ * fs.readFileSync('./my_model-weights_manifest').toString()
+ * )
+ * const weightMap = await loadWeights(manifest, './')
+ * ```
+ * @param fetchWeightsFunction The function used for fetching the weight files.
+ * @returns Weight loading function.
+ */
+declare function weightsLoaderFactory(fetchWeightsFunction: (fetchUrls: string[]) => Promise<ArrayBuffer[]>): (manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[]) => Promise<NamedTensorMap>;
+
+/**
+ * A weight manifest.
+ *
+ * The weight manifest consists of an ordered list of weight-manifest groups.
+ * Each weight-manifest group ("group" for short hereafter) consists of a
+ * number of weight values stored in a number of paths.
+ * See the documentation of `WeightsManifestGroupConfig` below for more details.
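+ *
+ * An illustrative (hypothetical) manifest with a single group:
+ *
+ * ```js
+ * [{
+ *   "paths": ["group1-shard1of1.bin"],
+ *   "weights": [{"name": "dense/kernel", "shape": [784, 10], "dtype": "float32"}]
+ * }]
+ * ```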
+ */ +declare type WeightsManifestConfig = WeightsManifestGroupConfig[]; + +/** + * An entry in the weight manifest. + * + * The entry contains specification of a weight. + */ +declare interface WeightsManifestEntry { + /** + * Name of the weight, e.g., 'Dense_1/bias' + */ + name: string; + /** + * Shape of the weight. + */ + shape: number[]; + /** + * Data type of the weight. + */ + dtype: 'float32' | 'int32' | 'bool' | 'string' | 'complex64'; + /** + * Type of the weight. + * + * Optional. + * + * The value 'optimizer' indicates the weight belongs to an optimizer + * (i.e., used only during model training and not during inference). + */ + group?: WeightGroup; + /** + * Information for dequantization of the weight. + */ + quantization?: { + scale?: number; + min?: number; + dtype: 'uint16' | 'uint8' | 'float16'; + }; +} + +/** + * A weight-manifest group. + * + * Consists of an ordered list of weight values encoded in binary format, + * stored in an ordered list of paths. + */ +declare interface WeightsManifestGroupConfig { + /** + * An ordered list of paths. + * + * Paths are intentionally abstract in order to be general. For example, they + * can be relative URL paths or relative paths on the file system. + */ + paths: string[]; + /** + * Specifications of the weights stored in the paths. + */ + weights: WeightsManifestEntry[]; +} + +/** + * Creates an IOHandler that passes saved model artifacts to a callback. + * + * ```js + * function handleSave(artifacts) { + * // ... do something with the artifacts ... + * return {modelArtifactsInfo: {...}, ...}; + * } + * + * const saveResult = model.save(tf.io.withSaveHandler(handleSave)); + * ``` + * + * @param saveHandler A function that accepts a `ModelArtifacts` and returns a + * `SaveResult`. + */ +declare function withSaveHandler(saveHandler: (artifacts: ModelArtifacts) => Promise): IOHandler; + +export { } diff --git a/dist/human.esm-nobundle.js b/dist/human.esm-nobundle.js new file mode 100644 index 00000000..1322a1e6 --- /dev/null +++ b/dist/human.esm-nobundle.js @@ -0,0 +1,839 @@ +/* + Human + homepage: + author: ' +*/ + +var kt=Object.defineProperty;var rr=Object.getOwnPropertyDescriptor;var sr=Object.getOwnPropertyNames;var ar=Object.prototype.hasOwnProperty;var ir=(e,t,o)=>t in e?kt(e,t,{enumerable:!0,configurable:!0,writable:!0,value:o}):e[t]=o;var Ze=(e,t)=>{for(var o in t)kt(e,o,{get:t[o],enumerable:!0})},Z=(e,t,o,A)=>{if(t&&typeof t=="object"||typeof t=="function")for(let n of sr(t))!ar.call(e,n)&&(o||n!=="default")&&kt(e,n,{get:()=>t[n],enumerable:!(A=rr(t,n))||A.enumerable});return e};var w=(e,t,o)=>(ir(e,typeof t!="symbol"?t+"":t,o),o),yo=(e,t,o)=>{if(!t.has(e))throw TypeError("Cannot "+o)};var ke=(e,t,o)=>(yo(e,t,"read from private field"),o?o.call(e):t.get(e)),Ee=(e,t,o)=>{if(t.has(e))throw TypeError("Cannot add the same private member more than once");t instanceof WeakSet?t.add(e):t.set(e,o)},ze=(e,t,o,A)=>(yo(e,t,"write to private field"),A?A.call(e,o):t.set(e,o),o);function g(...e){let t=new Date,o=`${t.getHours().toString().padStart(2,"0")}:${t.getMinutes().toString().padStart(2,"0")}:${t.getSeconds().toString().padStart(2,"0")}.${t.getMilliseconds().toString().padStart(3,"0")}`;e&&console.log(o,"Human:",...e)}function xo(e,t){let o=e.endsWith("/")?"":"/",n=t.startsWith(".")||t.startsWith("/")||t.startsWith("http:")||t.startsWith("https:")||t.startsWith("file:")?`${t}`:`${e}${o}${t}`;if(!n.toLocaleLowerCase().includes(".json"))throw new Error(`modelpath error: expecting json file: ${n}`);return n}var M=()=>typeof 
performance!="undefined"?performance.now():parseInt((Number(process.hrtime.bigint())/1e3/1e3).toString());function Et(e,t,o="config",A=[]){for(let n of Object.keys(t))if(typeof t[n]=="object")Et(e[n],t[n],n,A);else{let s=e&&typeof e[n]!="undefined";s||A.push({reason:"unknown property",where:`${o}.${n} = ${t[n]}`});let a=e&&typeof e[n]==typeof t[n];s&&!a&&A.push({reason:"property type mismatch",where:`${o}.${n} = ${t[n]}`,expected:typeof e[n]})}return t.debug&&o==="config"&&A.length>0&&g("invalid configuration",A),A}function o0(...e){let t=o=>o&&typeof o=="object";return e.reduce((o,A)=>(Object.keys(A||{}).forEach(n=>{let s=o[n],a=A[n];Array.isArray(s)&&Array.isArray(a)?o[n]=s.concat(...a):t(s)&&t(a)?o[n]=o0(s,a):o[n]=a}),o),{})}var Y0={backend:"",modelBasePath:"",cacheModels:!0,wasmPath:"",debug:!0,async:!0,warmup:"full",cacheSensitivity:.7,skipAllowed:!1,deallocate:!1,filter:{enabled:!0,equalization:!1,width:0,height:0,flip:!1,return:!0,brightness:0,contrast:0,sharpness:0,blur:0,saturation:0,hue:0,negative:!1,sepia:!1,vintage:!1,kodachrome:!1,technicolor:!1,polaroid:!1,pixelate:0},gesture:{enabled:!0},face:{enabled:!0,detector:{modelPath:"blazeface.json",rotation:!0,maxDetected:1,skipFrames:99,skipTime:2500,minConfidence:.2,iouThreshold:.1,mask:!1,return:!1},mesh:{enabled:!0,modelPath:"facemesh.json"},iris:{enabled:!0,modelPath:"iris.json"},emotion:{enabled:!0,minConfidence:.1,skipFrames:99,skipTime:1500,modelPath:"emotion.json"},description:{enabled:!0,modelPath:"faceres.json",skipFrames:99,skipTime:3e3,minConfidence:.1},antispoof:{enabled:!1,skipFrames:99,skipTime:4e3,modelPath:"antispoof.json"},liveness:{enabled:!1,skipFrames:99,skipTime:4e3,modelPath:"liveness.json"}},body:{enabled:!0,modelPath:"movenet-lightning.json",maxDetected:-1,minConfidence:.3,skipFrames:1,skipTime:200},hand:{enabled:!0,rotation:!0,skipFrames:99,skipTime:1e3,minConfidence:.5,iouThreshold:.2,maxDetected:-1,landmarks:!0,detector:{modelPath:"handtrack.json"},skeleton:{modelPath:"handlandmark-full.json"}},object:{enabled:!1,modelPath:"mb3-centernet.json",minConfidence:.2,iouThreshold:.4,maxDetected:10,skipFrames:99,skipTime:2e3},segmentation:{enabled:!1,modelPath:"selfie.json",blur:8}};var r={};Ze(r,{GraphModel:()=>zt,Tensor:()=>se,version:()=>je});Z(r,es);Z(r,ts);import*as es from"@tensorflow/tfjs/dist/index.js";import*as ts from"@tensorflow/tfjs-backend-webgl/dist/index.js";import{Tensor as se}from"@tensorflow/tfjs/dist/index.js";import{GraphModel as zt}from"@tensorflow/tfjs-converter/dist/index";var lr="3.13.0",yr="3.13.0",xr="3.13.0",cr="3.13.0",dr="3.13.0",fr="3.13.0",mr="3.13.0",pr="3.13.0",je={tfjs:lr,"tfjs-core":yr,"tfjs-data":xr,"tfjs-layers":cr,"tfjs-converter":dr,"tfjs-backend-cpu":fr,"tfjs-backend-webgl":mr,"tfjs-backend-wasm":pr};var co=` + precision highp float; + attribute vec2 pos; + attribute vec2 uv; + varying vec2 vUv; + uniform float flipY; + void main(void) { + vUv = uv; + gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.); + } +`;var fo=` + precision highp float; + varying vec2 vUv; + uniform sampler2D texture; + uniform float m[20]; + void main(void) { + vec4 c = texture2D(texture, vUv); + gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4]; + gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9]; + gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14]; + gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19]; + } +`,mo=` + precision highp float; + varying vec2 vUv; + uniform sampler2D texture; + uniform 
float m[20]; + void main(void) { + vec4 c = texture2D(texture, vUv); + gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4]; + gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9]; + gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14]; + gl_FragColor.a = c.a; + } +`,po=` + precision highp float; + varying vec2 vUv; + uniform vec2 size; + uniform sampler2D texture; + vec2 pixelate(vec2 coord, vec2 size) { + return floor( coord / size ) * size; + } + void main(void) { + gl_FragColor = vec4(0.0); + vec2 coord = pixelate(vUv, size); + gl_FragColor += texture2D(texture, coord); + } +`,uo=` + precision highp float; + varying vec2 vUv; + uniform sampler2D texture; + uniform vec2 px; + void main(void) { + gl_FragColor = vec4(0.0); + gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265; + gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794; + gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053; + gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718; + gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933; + gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105; + gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121; + gl_FragColor += texture2D(texture, vUv )*0.159576912161; + gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121; + gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105; + gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933; + gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718; + gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053; + gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794; + gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265; + } +`,ho=` + precision highp float; + varying vec2 vUv; + uniform sampler2D texture; + uniform vec2 px; + uniform float m[9]; + void main(void) { + vec4 c11 = texture2D(texture, vUv - px); // top left + vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y)); // top center + vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y)); // top right + vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) ); // mid left + vec4 c22 = texture2D(texture, vUv); // mid center + vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) ); // mid right + vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) ); // bottom left + vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) ); // bottom center + vec4 c33 = texture2D(texture, vUv + px ); // bottom right + gl_FragColor = + c11 * m[0] + c12 * m[1] + c22 * m[2] + + c21 * m[3] + c22 * m[4] + c23 * m[5] + + c31 * m[6] + c32 * m[7] + c33 * m[8]; + gl_FragColor.a = c22.a; + } +`;var jt=(e,t,o)=>{let A=new RegExp("\\b"+t+" \\w+ (\\w+)","ig");e.replace(A,(n,s)=>(o[s]=0,n))},bo=class{constructor(t,o,A){w(this,"uniform",{});w(this,"attribute",{});w(this,"gl");w(this,"id");w(this,"compile",(t,o)=>{let A=this.gl.createShader(o);return A?(this.gl.shaderSource(A,t),this.gl.compileShader(A),this.gl.getShaderParameter(A,this.gl.COMPILE_STATUS)?A:(g(`filter: gl compile failed: ${this.gl.getShaderInfoLog(A)}`),null)):(g("filter: could not create shader"),null)});this.gl=t;let 
n=this.compile(o,this.gl.VERTEX_SHADER),s=this.compile(A,this.gl.FRAGMENT_SHADER);if(this.id=this.gl.createProgram(),!(!n||!s)){if(!this.id){g("filter: could not create webgl program");return}if(this.gl.attachShader(this.id,n),this.gl.attachShader(this.id,s),this.gl.linkProgram(this.id),!this.gl.getProgramParameter(this.id,this.gl.LINK_STATUS)){g(`filter: gl link failed: ${this.gl.getProgramInfoLog(this.id)}`);return}this.gl.useProgram(this.id),jt(o,"attribute",this.attribute);for(let a in this.attribute)this.attribute[a]=this.gl.getAttribLocation(this.id,a);jt(o,"uniform",this.uniform),jt(A,"uniform",this.uniform);for(let a in this.uniform)this.uniform[a]=this.gl.getUniformLocation(this.id,a)}}};function go(){let e=0,t=null,o=!1,A=-1,n=[null,null],s=[],a=null,i=null,x=s0(100,100),d={},l={INTERMEDIATE:1},y=x.getContext("webgl");if(this.gl=y,!y){g("filter: cannot get webgl context");return}function c(P,p){if(!(P===x.width&&p===x.height)){if(x.width=P,x.height=p,!a){let u=new Float32Array([-1,-1,0,1,1,-1,1,1,-1,1,0,0,-1,1,0,0,1,-1,1,1,1,1,1,0]);a=y.createBuffer(),y.bindBuffer(y.ARRAY_BUFFER,a),y.bufferData(y.ARRAY_BUFFER,u,y.STATIC_DRAW),y.pixelStorei(y.UNPACK_PREMULTIPLY_ALPHA_WEBGL,!0)}y.viewport(0,0,x.width,x.height),n=[null,null]}}function f(P,p){let u=y.createFramebuffer();y.bindFramebuffer(y.FRAMEBUFFER,u);let E=y.createRenderbuffer();y.bindRenderbuffer(y.RENDERBUFFER,E);let W=y.createTexture();return y.bindTexture(y.TEXTURE_2D,W),y.texImage2D(y.TEXTURE_2D,0,y.RGBA,P,p,0,y.RGBA,y.UNSIGNED_BYTE,null),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_MAG_FILTER,y.LINEAR),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_MIN_FILTER,y.LINEAR),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_WRAP_S,y.CLAMP_TO_EDGE),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_WRAP_T,y.CLAMP_TO_EDGE),y.framebufferTexture2D(y.FRAMEBUFFER,y.COLOR_ATTACHMENT0,y.TEXTURE_2D,W,0),y.bindTexture(y.TEXTURE_2D,null),y.bindFramebuffer(y.FRAMEBUFFER,null),{fbo:u,texture:W}}function h(P){return n[P]=n[P]||f(x.width,x.height),n[P]}function m(P=0){if(!i)return;let p=null,u=null,E=!1;e===0?p=t:p=h(A).texture||null,e++,o&&!(P&l.INTERMEDIATE)?(u=null,E=e%2===0):(A=(A+1)%2,u=h(A).fbo||null),y.bindTexture(y.TEXTURE_2D,p),y.bindFramebuffer(y.FRAMEBUFFER,u),y.uniform1f(i.uniform.flipY,E?-1:1),y.drawArrays(y.TRIANGLES,0,6)}function v(P){if(d[P])return i=d[P],y.useProgram((i?i.id:null)||null),i;if(i=new bo(y,co,P),!i)return g("filter: could not get webgl program"),null;let p=Float32Array.BYTES_PER_ELEMENT,u=4*p;return y.enableVertexAttribArray(i.attribute.pos),y.vertexAttribPointer(i.attribute.pos,2,y.FLOAT,!1,u,0*p),y.enableVertexAttribArray(i.attribute.uv),y.vertexAttribPointer(i.attribute.uv,2,y.FLOAT,!1,u,2*p),d[P]=i,i}let b={colorMatrix:P=>{let p=new Float32Array(P);p[4]/=255,p[9]/=255,p[14]/=255,p[19]/=255;let u=p[18]===1&&p[3]===0&&p[8]===0&&p[13]===0&&p[15]===0&&p[16]===0&&p[17]===0&&p[19]===0?mo:fo,E=v(u);!E||(y.uniform1fv(E.uniform.m,p),m())},brightness:P=>{let p=(P||0)+1;b.colorMatrix([p,0,0,0,0,0,p,0,0,0,0,0,p,0,0,0,0,0,1,0])},saturation:P=>{let p=(P||0)*2/3+1,u=(p-1)*-.5;b.colorMatrix([p,u,u,0,0,u,p,u,0,0,u,u,p,0,0,0,0,0,1,0])},desaturate:()=>{b.saturation(-1)},contrast:P=>{let p=(P||0)+1,u=-128*(p-1);b.colorMatrix([p,0,0,0,u,0,p,0,0,u,0,0,p,0,u,0,0,0,1,0])},negative:()=>{b.contrast(-2)},hue:P=>{P=(P||0)/180*Math.PI;let 
p=Math.cos(P),u=Math.sin(P),E=.213,W=.715,C=.072;b.colorMatrix([E+p*(1-E)+u*-E,W+p*-W+u*-W,C+p*-C+u*(1-C),0,0,E+p*-E+u*.143,W+p*(1-W)+u*.14,C+p*-C+u*-.283,0,0,E+p*-E+u*-(1-E),W+p*-W+u*W,C+p*(1-C)+u*C,0,0,0,0,0,1,0])},desaturateLuminance:()=>{b.colorMatrix([.2764723,.929708,.0938197,0,-37.1,.2764723,.929708,.0938197,0,-37.1,.2764723,.929708,.0938197,0,-37.1,0,0,0,1,0])},sepia:()=>{b.colorMatrix([.393,.7689999,.18899999,0,0,.349,.6859999,.16799999,0,0,.272,.5339999,.13099999,0,0,0,0,0,1,0])},brownie:()=>{b.colorMatrix([.5997023498159715,.34553243048391263,-.2708298674538042,0,47.43192855600873,-.037703249837783157,.8609577587992641,.15059552388459913,0,-36.96841498319127,.24113635128153335,-.07441037908422492,.44972182064877153,0,-7.562075277591283,0,0,0,1,0])},vintagePinhole:()=>{b.colorMatrix([.6279345635605994,.3202183420819367,-.03965408211312453,0,9.651285835294123,.02578397704808868,.6441188644374771,.03259127616149294,0,7.462829176470591,.0466055556782719,-.0851232987247891,.5241648018700465,0,5.159190588235296,0,0,0,1,0])},kodachrome:()=>{b.colorMatrix([1.1285582396593525,-.3967382283601348,-.03992559172921793,0,63.72958762196502,-.16404339962244616,1.0835251566291304,-.05498805115633132,0,24.732407896706203,-.16786010706155763,-.5603416277695248,1.6014850761964943,0,35.62982807460946,0,0,0,1,0])},technicolor:()=>{b.colorMatrix([1.9125277891456083,-.8545344976951645,-.09155508482755585,0,11.793603434377337,-.3087833385928097,1.7658908555458428,-.10601743074722245,0,-70.35205161461398,-.231103377548616,-.7501899197440212,1.847597816108189,0,30.950940869491138,0,0,0,1,0])},polaroid:()=>{b.colorMatrix([1.438,-.062,-.062,0,0,-.122,1.378,-.122,0,0,-.016,-.016,1.483,0,0,0,0,0,1,0])},shiftToBGR:()=>{b.colorMatrix([0,0,1,0,0,0,1,0,0,0,1,0,0,0,0,0,0,0,1,0])},convolution:P=>{let p=new Float32Array(P),u=1/x.width,E=1/x.height,W=v(ho);!W||(y.uniform1fv(W.uniform.m,p),y.uniform2f(W.uniform.px,u,E),m())},detectEdges:()=>{b.convolution.call(this,[0,1,0,1,-4,1,0,1,0])},sobelX:()=>{b.convolution.call(this,[-1,0,1,-2,0,2,-1,0,1])},sobelY:()=>{b.convolution.call(this,[-1,-2,-1,0,0,0,1,2,1])},sharpen:P=>{let p=P||1;b.convolution.call(this,[0,-1*p,0,-1*p,1+4*p,-1*p,0,-1*p,0])},emboss:P=>{let p=P||1;b.convolution.call(this,[-2*p,-1*p,0,-1*p,1,1*p,0,1*p,2*p])},blur:P=>{let p=P/7/x.width,u=P/7/x.height,E=v(uo);!E||(y.uniform2f(E.uniform.px,0,u),m(l.INTERMEDIATE),y.uniform2f(E.uniform.px,p,0),m())},pixelate:P=>{let p=P/x.width,u=P/x.height,E=v(po);!E||(y.uniform2f(E.uniform.size,p,u),m())}};this.add=function(P){let p=Array.prototype.slice.call(arguments,1),u=b[P];s.push({func:u,args:p})},this.reset=function(){s=[]},this.get=function(){return s},this.apply=function(P){c(P.width,P.height),e=0,t||(t=y.createTexture()),y.bindTexture(y.TEXTURE_2D,t),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_WRAP_S,y.CLAMP_TO_EDGE),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_WRAP_T,y.CLAMP_TO_EDGE),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_MIN_FILTER,y.NEAREST),y.texParameteri(y.TEXTURE_2D,y.TEXTURE_MAG_FILTER,y.NEAREST),y.texImage2D(y.TEXTURE_2D,0,y.RGBA,y.RGBA,y.UNSIGNED_BYTE,P);for(let p=0;pf.data())),a=.99*Math.max(s[0][0],s[1][0],s[2][0]),i=[r.sub(o[0],A[0]),r.sub(o[1],A[1]),r.sub(o[2],A[2])],x=[r.sub(n[0],A[0]),r.sub(n[1],A[1]),r.sub(n[2],A[2])],d=[r.div(a,x[0]),r.div(a,x[1]),r.div(a,x[2])],l=[r.mul(i[0],d[0]),r.mul(i[1],d[1]),r.mul(i[2],d[2])],y=r.stack([l[0],l[1],l[2]],2),c=r.reshape(y,[1,t.shape[0],t.shape[1],3]);return r.dispose([...o,...A,...n,...i,...x,...d,...l,y,t]),c}var 
De=2048,G=null,_=null,ae=null,D,z0={inputSum:0,cacheDiff:1,sumMethod:0,inputTensor:void 0};function s0(e,t){let o;if(R.browser)if(R.worker){if(typeof OffscreenCanvas=="undefined")throw new Error("canvas error: attempted to run in web worker but OffscreenCanvas is not supported");o=new OffscreenCanvas(e,t)}else{if(typeof document=="undefined")throw new Error("canvas error: attempted to run in browser but DOM is not defined");o=document.createElement("canvas"),o.width=e,o.height=t}else typeof R.Canvas!="undefined"?o=new R.Canvas(e,t):typeof globalThis.Canvas!="undefined"&&(o=new globalThis.Canvas(e,t));return o}function St(e,t){let o=t||s0(e.width,e.height);return o.getContext("2d").drawImage(e,0,0),o}async function ie(e,t,o=!0){if(!e)return t.debug&&g("input error: input is missing"),{tensor:null,canvas:null};if(!(e instanceof se)&&!(typeof Image!="undefined"&&e instanceof Image)&&!(typeof R.Canvas!="undefined"&&e instanceof R.Canvas)&&!(typeof globalThis.Canvas!="undefined"&&e instanceof globalThis.Canvas)&&!(typeof ImageData!="undefined"&&e instanceof ImageData)&&!(typeof ImageBitmap!="undefined"&&e instanceof ImageBitmap)&&!(typeof HTMLImageElement!="undefined"&&e instanceof HTMLImageElement)&&!(typeof HTMLMediaElement!="undefined"&&e instanceof HTMLMediaElement)&&!(typeof HTMLVideoElement!="undefined"&&e instanceof HTMLVideoElement)&&!(typeof HTMLCanvasElement!="undefined"&&e instanceof HTMLCanvasElement)&&!(typeof OffscreenCanvas!="undefined"&&e instanceof OffscreenCanvas))throw new Error("input error: type is not recognized");if(e instanceof se){let A=null;if(e.isDisposedInternal)throw new Error("input error: attempted to use tensor but it is disposed");if(!e.shape)throw new Error("input error: attempted to use tensor without a shape");if(e.shape.length===3){if(e.shape[2]===3)A=r.expandDims(e,0);else if(e.shape[2]===4){let n=r.slice3d(e,[0,0,0],[-1,-1,3]);A=r.expandDims(n,0),r.dispose(n)}}else e.shape.length===4&&(e.shape[3]===3?A=r.clone(e):e.shape[3]===4&&(A=r.slice4d(e,[0,0,0,0],[-1,-1,-1,3])));if(A==null||A.shape.length!==4||A.shape[0]!==1||A.shape[3]!==3)throw new Error(`input error: attempted to use tensor with unrecognized shape: ${e.shape}`);if(A.dtype==="int32"){let n=r.cast(A,"float32");r.dispose(A),A=n}return{tensor:A,canvas:t.filter.return?_:null}}else{if(typeof e.readyState!="undefined"&&e.readyState<=2)return t.debug&&g("input stream is not ready"),{tensor:null,canvas:G};let A=e.naturalWidth||e.videoWidth||e.width||e.shape&&e.shape[1]>0,n=e.naturalHeight||e.videoHeight||e.height||e.shape&&e.shape[2]>0;if(!A||!n)return t.debug&&g("cannot determine input dimensions"),{tensor:null,canvas:G};let s=A,a=n;if(s>De&&(s=De,a=Math.trunc(s*n/A)),a>De&&(a=De,s=Math.trunc(a*A/n)),(t.filter.width||0)>0?s=t.filter.width:(t.filter.height||0)>0&&(s=A*((t.filter.height||0)/n)),(t.filter.height||0)>0?a=t.filter.height:(t.filter.width||0)>0&&(a=n*((t.filter.width||0)/A)),!s||!a)throw new Error("input error: cannot determine dimension");(!G||(G==null?void 0:G.width)!==s||(G==null?void 0:G.height)!==a)&&(G=s0(s,a));let i=G.getContext("2d");if(typeof ImageData!="undefined"&&e instanceof ImageData?i.putImageData(e,0,0):t.filter.flip&&typeof i.translate!="undefined"?(i.translate(A,0),i.scale(-1,1),i.drawImage(e,0,0,A,n,0,0,G==null?void 0:G.width,G==null?void 0:G.height),i.setTransform(1,0,0,1,0,0)):i.drawImage(e,0,0,A,n,0,0,G==null?void 0:G.width,G==null?void 0:G.height),(!_||G.width!==_.width||(G==null?void 0:G.height)!==(_==null?void 
0:_.height))&&(_=s0(G.width,G.height)),t.filter.enabled&&R.webgl.supported){if(D||(D=R.browser?new go:null),R.filter=!!D,!D||!D.add)return t.debug&&g("input process error: cannot initialize filters"),{tensor:null,canvas:G};D.reset(),t.filter.brightness!==0&&D.add("brightness",t.filter.brightness),t.filter.contrast!==0&&D.add("contrast",t.filter.contrast),t.filter.sharpness!==0&&D.add("sharpen",t.filter.sharpness),t.filter.blur!==0&&D.add("blur",t.filter.blur),t.filter.saturation!==0&&D.add("saturation",t.filter.saturation),t.filter.hue!==0&&D.add("hue",t.filter.hue),t.filter.negative&&D.add("negative"),t.filter.sepia&&D.add("sepia"),t.filter.vintage&&D.add("brownie"),t.filter.sepia&&D.add("sepia"),t.filter.kodachrome&&D.add("kodachrome"),t.filter.technicolor&&D.add("technicolor"),t.filter.polaroid&&D.add("polaroid"),t.filter.pixelate!==0&&D.add("pixelate",t.filter.pixelate),D.get()>0?_=D.apply(G):_=D.draw(G)}else St(G,_),D&&(D=null),R.filter=!!D;if(!o)return{tensor:null,canvas:_};if(!_)throw new Error("canvas error: cannot create output");let x,d=3;if(typeof ImageData!="undefined"&&e instanceof ImageData||e.data&&e.width&&e.height)if(R.browser&&r.browser)x=r.browser?r.browser.fromPixels(e):null;else{d=e.data.length/e.height/e.width;let c=new Uint8Array(e.data.buffer);x=r.tensor(c,[e.height,e.width,d],"int32")}else if((!ae||_.width!==ae.width||_.height!==ae.height)&&(ae=s0(_.width,_.height)),r.browser&&R.browser)t.backend==="webgl"||t.backend==="humangl"||t.backend==="webgpu"?x=r.browser.fromPixels(_):(ae=St(_),x=r.browser.fromPixels(ae));else{let h=St(_).getContext("2d").getImageData(0,0,s,a);d=h.data.length/s/a;let m=new Uint8Array(h.data.buffer);x=r.tensor(m,[s,a,d])}if(d===4){let c=r.slice3d(x,[0,0,0],[-1,-1,3]);r.dispose(x),x=c}if(!x)throw new Error("input error: cannot create tensor");let l=r.cast(x,"float32"),y=t.filter.equalization?await Xe(l):r.expandDims(l,0);return r.dispose([x,l]),{tensor:y,canvas:t.filter.return?_:null}}}async function Mo(e,t){let o=!1;if(e.cacheSensitivity===0||!t.shape||t.shape.length!==4||t.shape[1]>2048||t.shape[2]>2048)return o;if(!z0.inputTensor)z0.inputTensor=r.clone(t);else if(z0.inputTensor.shape[1]!==t.shape[1]||z0.inputTensor.shape[2]!==t.shape[2])r.dispose(z0.inputTensor),z0.inputTensor=r.clone(t);else{let A={};A.diff=r.sub(t,z0.inputTensor),A.squared=r.mul(A.diff,A.diff),A.sum=r.sum(A.squared);let s=(await A.sum.data())[0]/(t.shape[1]||1)/(t.shape[2]||1)/255/3;r.dispose([z0.inputTensor,A.diff,A.squared,A.sum]),z0.inputTensor=r.clone(t),o=s<=(e.cacheSensitivity||0)}return o}async function Po(e,t,o){let A={};if(!t||!o||t.shape.length!==4||t.shape.length!==o.shape.length)return e.debug||g("invalid input tensor or tensor shapes do not match:",t.shape,o.shape),0;if(t.shape[0]!==1||o.shape[0]!==1||t.shape[3]!==3||o.shape[3]!==3)return e.debug||g("input tensors must be of shape [1, height, width, 3]:",t.shape,o.shape),0;A.input1=r.clone(t),A.input2=t.shape[1]!==o.shape[1]||t.shape[2]!==o.shape[2]?r.image.resizeBilinear(o,[t.shape[1],t.shape[2]]):r.clone(o),A.diff=r.sub(A.input1,A.input2),A.squared=r.mul(A.diff,A.diff),A.sum=r.sum(A.squared);let s=(await A.sum.data())[0]/(t.shape[1]||1)/(t.shape[2]||1)/255/3;return r.dispose([A.input1,A.input2,A.diff,A.squared,A.sum]),s}var vo=class{constructor(){w(this,"browser");w(this,"node");w(this,"worker");w(this,"platform","");w(this,"agent","");w(this,"backends",[]);w(this,"initial");w(this,"filter");w(this,"tfjs");w(this,"offscreen");w(this,"perfadd",!1);w(this,"wasm",{supported:void 0,backend:void 0,simd:void 
0,multithread:void 0});w(this,"webgl",{supported:void 0,backend:void 0,version:void 0,renderer:void 0});w(this,"webgpu",{supported:void 0,backend:void 0,adapter:void 0});w(this,"cpu",{model:void 0,flags:[]});w(this,"kernels",[]);w(this,"Canvas");w(this,"Image");w(this,"ImageData");if(this.browser=typeof navigator!="undefined",this.node=typeof process!="undefined"&&typeof process.versions!="undefined"&&typeof process.versions.node!="undefined",this.tfjs={version:je["tfjs-core"]},this.offscreen=typeof OffscreenCanvas!="undefined",this.initial=!0,this.worker=this.browser&&this.offscreen?typeof WorkerGlobalScope!="undefined":void 0,typeof navigator!="undefined"){let t=navigator.userAgent.match(/\(([^()]+)\)/g);if(t&&t[0]){let o=t[0].match(/\(([^()]+)\)/g);this.platform=o&&o[0]?o[0].replace(/\(|\)/g,""):"",this.agent=navigator.userAgent.replace(t[0],""),this.platform[1]&&(this.agent=this.agent.replace(t[1],"")),this.agent=this.agent.replace(/ /g," ")}}else typeof process!="undefined"&&(this.platform=`${process.platform} ${process.arch}`,this.agent=`NodeJS ${process.version}`)}async updateBackend(){this.backends=Object.keys(r.engine().registryFactory),this.wasm.supported=typeof WebAssembly!="undefined",this.wasm.backend=this.backends.includes("wasm"),this.wasm.supported&&this.wasm.backend&&r.getBackend()==="wasm"&&(this.wasm.simd=await r.env().getAsync("WASM_HAS_SIMD_SUPPORT"),this.wasm.multithread=await r.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT"));let t=s0(100,100),o=t?t.getContext("webgl2"):void 0;if(this.webgl.supported=typeof o!="undefined",this.webgl.backend=this.backends.includes("webgl"),this.webgl.supported&&this.webgl.backend&&(r.getBackend()==="webgl"||r.getBackend()==="humangl")){let A=r.backend().gpgpu!=="undefined"?await r.backend().getGPGPUContext().gl:null;A&&(this.webgl.version=A.getParameter(A.VERSION),this.webgl.renderer=A.getParameter(A.RENDERER))}this.webgpu.supported=this.browser&&typeof navigator.gpu!="undefined",this.webgpu.backend=this.backends.includes("webgpu");try{this.webgpu.supported&&(this.webgpu.adapter=(await navigator.gpu.requestAdapter()).name)}catch(A){this.webgpu.supported=!1}try{this.kernels=r.getKernelsForBackend(r.getBackend()).map(A=>A.kernelName.toLowerCase())}catch(A){}}async updateCPU(){let t={model:"",flags:[]};this.node&&this.platform.startsWith("linux"),this.cpu?this.cpu=t:Object.defineProperty(this,"cpu",{value:t})}},R=new vo;var j0={cacheModels:!1,verbose:!0,debug:!1,modelBasePath:""};async function gr(e,t){return j0.debug&&g("load model fetch:",e,t),fetch(e,t)}function To(e){j0.cacheModels=e.cacheModels,j0.verbose=e.debug,j0.modelBasePath=e.modelBasePath}async function N(e){let t=xo(j0.modelBasePath,e||""),o=t.split("/"),A="indexeddb://"+o[o.length-1].replace(".json",""),n=await r.io.listModels(),s=j0.cacheModels&&Object.keys(n).includes(A),a=typeof fetch=="undefined"?{}:{fetchFunc:(x,d)=>gr(x,d)},i=new zt(s?A:t,a);try{i.findIOHandler(),j0.debug&&g("model load handler:",i.handler);let x=await i.handler.load();i.loadSync(x),j0.verbose&&g("load model:",i.modelUrl)}catch(x){g("error loading model:",t,x)}if(j0.cacheModels&&!s)try{let x=await i.save(A);g("model saved:",A,x)}catch(x){g("error saving model:",t,x)}return i}var Wt="2.6.2";var u0,Ct=[],vr=["white","black","asian","indian","other"],Tr=[15,23,28,35.5,45.5,55.5,65],Ro=0,wo=0,It=Number.MAX_SAFE_INTEGER;async function ko(e){return R.initial&&(u0=null),u0?e.debug&&g("cached model:",u0.modelUrl):u0=await N(e.face.gear),u0}async function Ot(e,t,o,A){var 
a,i;if(!u0)return{age:0,gender:"unknown",genderScore:0,race:[]};let n=It<(((a=t.face.gear)==null?void 0:a.skipFrames)||0),s=(((i=t.face.gear)==null?void 0:i.skipTime)||0)>M()-wo;return t.skipAllowed&&s&&n&&Ro===A&&Ct[o]?(It++,Ct[o]):(It=0,new Promise(async x=>{var b,P;if(!(u0==null?void 0:u0.inputs[0].shape))return;let d={},l=[[0,.1,.9,.9]];d.resize=r.image.cropAndResize(e,l,[0],[u0.inputs[0].shape[2],u0.inputs[0].shape[1]]);let y={age:0,gender:"unknown",genderScore:0,race:[]};((b=t.face.gear)==null?void 0:b.enabled)&&([d.age,d.gender,d.race]=u0.execute(d.resize,["age_output","gender_output","race_output"]));let c=await d.gender.data();y.gender=c[0]>c[1]?"male":"female",y.genderScore=Math.round(100*(c[0]>c[1]?c[0]:c[1]))/100;let f=await d.race.data();for(let p=0;p(((P=t.face.gear)==null?void 0:P.minConfidence)||.2)&&y.race.push({score:Math.round(100*f[p])/100,race:vr[p]});y.race.sort((p,u)=>u.score-p.score);let m=Array.from(await d.age.data()).map((p,u)=>[Tr[u],p]).sort((p,u)=>u[1]-p[1]),v=m[0][0];for(let p=1;pr.dispose(d[p])),Ct[o]=y,Ro=A,wo=M(),x(y)}))}var L={tf255:255,tf1:1,tf2:2,tf05:.5,tf127:127.5,rgb:[.2989,.587,.114]};function zo(){L.tf255=r.scalar(255,"float32"),L.tf1=r.scalar(1,"float32"),L.tf2=r.scalar(2,"float32"),L.tf05=r.scalar(.5,"float32"),L.tf127=r.scalar(127.5,"float32"),L.rgb=r.tensor1d([.2989,.587,.114],"float32")}var l0,qe=[],jo=0,So=0,Nt=Number.MAX_SAFE_INTEGER;async function Wo(e){return R.initial&&(l0=null),l0?e.debug&&g("cached model:",l0.modelUrl):l0=await N(e.face.ssrnet.modelPathAge),l0}async function Lt(e,t,o,A){var a,i,x,d;if(!l0)return{age:0};let n=Nt<(((a=t.face.ssrnet)==null?void 0:a.skipFrames)||0),s=(((i=t.face.ssrnet)==null?void 0:i.skipTime)||0)>M()-So;return t.skipAllowed&&n&&s&&jo===A&&((x=qe[o])==null?void 0:x.age)&&((d=qe[o])==null?void 0:d.age)>0?(Nt++,qe[o]):(Nt=0,new Promise(async l=>{if(!(l0==null?void 0:l0.inputs)||!l0.inputs[0]||!l0.inputs[0].shape)return;let y={};y.resize=r.image.resizeBilinear(e,[l0.inputs[0].shape[2],l0.inputs[0].shape[1]],!1),y.enhance=r.mul(y.resize,L.tf255);let c={age:0};if(t.face.ssrnet.enabled&&(y.age=l0.execute(y.enhance)),y.age){let f=await y.age.data();c.age=Math.trunc(10*f[0])/10}Object.keys(y).forEach(f=>r.dispose(y[f])),qe[o]=c,jo=A,So=M(),l(c)}))}var h0,Ue=[],Io=0,Oo=0,Bt=Number.MAX_SAFE_INTEGER,Gt=[.2989,.587,.114];async function No(e){return R.initial&&(h0=null),h0?e.debug&&g("cached model:",h0.modelUrl):h0=await N(e.face.ssrnet.modelPathGender),h0}async function Ht(e,t,o,A){var a,i,x,d;if(!h0)return{gender:"unknown",genderScore:0};let n=Bt<(((a=t.face.ssrnet)==null?void 0:a.skipFrames)||0),s=(((i=t.face.ssrnet)==null?void 0:i.skipTime)||0)>M()-Oo;return t.skipAllowed&&n&&s&&Io===A&&((x=Ue[o])==null?void 0:x.gender)&&((d=Ue[o])==null?void 0:d.genderScore)>0?(Bt++,Ue[o]):(Bt=0,new Promise(async l=>{if(!(h0==null?void 0:h0.inputs[0].shape))return;let y={};y.resize=r.image.resizeBilinear(e,[h0.inputs[0].shape[2],h0.inputs[0].shape[1]],!1),y.enhance=r.tidy(()=>{let[h,m,v]=r.split(y.resize,3,3),b=r.mul(h,Gt[0]),P=r.mul(m,Gt[1]),p=r.mul(v,Gt[2]),u=r.addN([b,P,p]);return r.mul(r.sub(u,L.tf05),2)});let c={gender:"unknown",genderScore:0};t.face.ssrnet.enabled&&(y.gender=h0.execute(y.enhance));let f=await y.gender.data();c.gender=f[0]>f[1]?"female":"male",c.genderScore=f[0]>f[1]?Math.trunc(100*f[0])/100:Math.trunc(100*f[1])/100,Object.keys(y).forEach(h=>r.dispose(y[h])),Ue[o]=c,Io=A,Oo=M(),l(c)}))}var A0,Je=[],Ft=Number.MAX_SAFE_INTEGER,Bo=0,Go=0;async function Ho(e){var t;return 
R.initial&&(A0=null),A0?e.debug&&g("cached model:",A0.modelUrl):A0=await N((t=e.face.antispoof)==null?void 0:t.modelPath),A0}async function Vt(e,t,o,A){var a,i;if(!A0)return 0;let n=(((a=t.face.antispoof)==null?void 0:a.skipTime)||0)>M()-Go,s=Ft<(((i=t.face.antispoof)==null?void 0:i.skipFrames)||0);return t.skipAllowed&&n&&s&&Bo===A&&Je[o]?(Ft++,Je[o]):(Ft=0,new Promise(async x=>{let d=r.image.resizeBilinear(e,[(A0==null?void 0:A0.inputs[0].shape)?A0.inputs[0].shape[2]:0,(A0==null?void 0:A0.inputs[0].shape)?A0.inputs[0].shape[1]:0],!1),l=A0==null?void 0:A0.execute(d),y=(await l.data())[0];Je[o]=Math.round(100*y)/100,Bo=A,Go=M(),r.dispose([d,l]),x(Je[o])}))}var b0={silhouette:[10,338,297,332,284,251,389,356,454,323,361,288,397,365,379,378,400,377,152,148,176,149,150,136,172,58,132,93,234,127,162,21,54,103,67,109],lipsUpperOuter:[61,185,40,39,37,0,267,269,270,409,291],lipsLowerOuter:[146,91,181,84,17,314,405,321,375,291],lipsUpperInner:[78,191,80,81,82,13,312,311,310,415,308],lipsLowerInner:[78,95,88,178,87,14,317,402,318,324,308],rightEyeUpper0:[246,161,160,159,158,157,173],rightEyeLower0:[33,7,163,144,145,153,154,155,133],rightEyeUpper1:[247,30,29,27,28,56,190],rightEyeLower1:[130,25,110,24,23,22,26,112,243],rightEyeUpper2:[113,225,224,223,222,221,189],rightEyeLower2:[226,31,228,229,230,231,232,233,244],rightEyeLower3:[143,111,117,118,119,120,121,128,245],rightEyebrowUpper:[156,70,63,105,66,107,55,193],rightEyebrowLower:[35,124,46,53,52,65],rightEyeIris:[473,474,475,476,477],leftEyeUpper0:[466,388,387,386,385,384,398],leftEyeLower0:[263,249,390,373,374,380,381,382,362],leftEyeUpper1:[467,260,259,257,258,286,414],leftEyeLower1:[359,255,339,254,253,252,256,341,463],leftEyeUpper2:[342,445,444,443,442,441,413],leftEyeLower2:[446,261,448,449,450,451,452,453,464],leftEyeLower3:[372,340,346,347,348,349,350,357,465],leftEyebrowUpper:[383,300,293,334,296,336,285,417],leftEyebrowLower:[265,353,276,283,282,295],leftEyeIris:[468,469,470,471,472],midwayBetweenEyes:[168],noseTip:[1],noseBottom:[2],noseRightCorner:[98],noseLeftCorner:[327],rightCheek:[205],leftCheek:[425]},Zt={count:468,mouth:13,symmetryLine:[13,b0.midwayBetweenEyes[0]]},We={leftEye:0,rightEye:1,nose:2,mouth:3,leftEar:4,rightEar:5,symmetryLine:[3,2]},Xt=[{key:"EyeUpper0",indices:[9,10,11,12,13,14,15]},{key:"EyeUpper1",indices:[25,26,27,28,29,30,31]},{key:"EyeUpper2",indices:[41,42,43,44,45,46,47]},{key:"EyeLower0",indices:[0,1,2,3,4,5,6,7,8]},{key:"EyeLower1",indices:[16,17,18,19,20,21,22,23,24]},{key:"EyeLower2",indices:[32,33,34,35,36,37,38,39,40]},{key:"EyeLower3",indices:[54,55,56,57,58,59,60,61,62]}],Ce=[[.499976992607117,.652534008026123],[.500025987625122,.547487020492554],[.499974012374878,.602371990680695],[.482113003730774,.471979022026062],[.500150978565216,.527155995368958],[.499909996986389,.498252987861633],[.499523013830185,.40106201171875],[.289712011814117,.380764007568359],[.499954998493195,.312398016452789],[.499987006187439,.269918978214264],[.500023007392883,.107050001621246],[.500023007392883,.666234016418457],[.5000159740448,.679224014282227],[.500023007392883,.692348003387451],[.499976992607117,.695277988910675],[.499976992607117,.70593398809433],[.499976992607117,.719385027885437],[.499976992607117,.737019002437592],[.499967992305756,.781370997428894],[.499816000461578,.562981009483337],[.473773002624512,.573909997940063],[.104906998574734,.254140973091125],[.365929991006851,.409575998783112],[.338757991790771,.41302502155304],[.311120003461838,.409460008144379],[.274657994508743,.389131009578705],[.39336198568344
1,.403706014156342],[.345234006643295,.344011008739471],[.370094001293182,.346076011657715],[.319321990013123,.347265005111694],[.297903001308441,.353591024875641],[.24779200553894,.410809993743896],[.396889001131058,.842755019664764],[.280097991228104,.375599980354309],[.106310002505779,.399955987930298],[.2099249958992,.391353011131287],[.355807989835739,.534406006336212],[.471751004457474,.65040397644043],[.474155008792877,.680191993713379],[.439785003662109,.657229006290436],[.414617002010345,.66654098033905],[.450374007225037,.680860996246338],[.428770989179611,.682690978050232],[.374971002340317,.727805018424988],[.486716985702515,.547628998756409],[.485300987958908,.527395009994507],[.257764995098114,.314490020275116],[.401223003864288,.455172002315521],[.429818987846375,.548614978790283],[.421351999044418,.533740997314453],[.276895999908447,.532056987285614],[.483370006084442,.499586999416351],[.33721199631691,.282882988452911],[.296391993761063,.293242990970612],[.169294998049736,.193813979625702],[.447580009698868,.302609980106354],[.392390012741089,.353887975215912],[.354490011930466,.696784019470215],[.067304998636246,.730105042457581],[.442739009857178,.572826027870178],[.457098007202148,.584792017936707],[.381974011659622,.694710969924927],[.392388999462128,.694203019142151],[.277076005935669,.271932005882263],[.422551989555359,.563233017921448],[.385919004678726,.281364023685455],[.383103013038635,.255840003490448],[.331431001424789,.119714021682739],[.229923993349075,.232002973556519],[.364500999450684,.189113974571228],[.229622006416321,.299540996551514],[.173287004232407,.278747975826263],[.472878992557526,.666198015213013],[.446828007698059,.668527007102966],[.422762006521225,.673889994621277],[.445307999849319,.580065965652466],[.388103008270264,.693961024284363],[.403039008378983,.706539988517761],[.403629004955292,.693953037261963],[.460041999816895,.557139039039612],[.431158006191254,.692366003990173],[.452181994915009,.692366003990173],[.475387006998062,.692366003990173],[.465828001499176,.779190003871918],[.472328990697861,.736225962638855],[.473087012767792,.717857003211975],[.473122000694275,.704625964164734],[.473033010959625,.695277988910675],[.427942007780075,.695277988910675],[.426479011774063,.703539967536926],[.423162013292313,.711845993995667],[.4183090031147,.720062971115112],[.390094995498657,.639572978019714],[.013953999616206,.560034036636353],[.499913990497589,.58014702796936],[.413199990987778,.69539999961853],[.409626007080078,.701822996139526],[.468080013990402,.601534962654114],[.422728985548019,.585985004901886],[.463079988956451,.593783974647522],[.37211999297142,.47341400384903],[.334562003612518,.496073007583618],[.411671012639999,.546965003013611],[.242175996303558,.14767599105835],[.290776997804642,.201445996761322],[.327338010072708,.256527006626129],[.399509996175766,.748921036720276],[.441727995872498,.261676013469696],[.429764986038208,.187834024429321],[.412198007106781,.108901023864746],[.288955003023148,.398952007293701],[.218936994671822,.435410976409912],[.41278201341629,.398970007896423],[.257135003805161,.355440020561218],[.427684992551804,.437960982322693],[.448339998722076,.536936044692993],[.178560003638268,.45755398273468],[.247308000922203,.457193970680237],[.286267012357712,.467674970626831],[.332827985286713,.460712015628815],[.368755996227264,.447206974029541],[.398963987827301,.432654976844788],[.476410001516342,.405806005001068],[.189241006970406,.523923993110657],[.228962004184723,.348950982093811],[.490725994110107,.562
400996685028],[.404670000076294,.485132992267609],[.019469000399113,.401564002037048],[.426243007183075,.420431017875671],[.396993011236191,.548797011375427],[.266469985246658,.376977026462555],[.439121007919312,.51895797252655],[.032313998788595,.644356966018677],[.419054001569748,.387154996395111],[.462783008813858,.505746960639954],[.238978996872902,.779744982719421],[.198220998048782,.831938028335571],[.107550002634525,.540755033493042],[.183610007166862,.740257024765015],[.134409993886948,.333683013916016],[.385764002799988,.883153975009918],[.490967005491257,.579378008842468],[.382384985685349,.508572995662689],[.174399003386497,.397670984268188],[.318785011768341,.39623498916626],[.343364000320435,.400596976280212],[.396100014448166,.710216999053955],[.187885001301765,.588537991046906],[.430987000465393,.944064974784851],[.318993002176285,.898285031318665],[.266247987747192,.869701027870178],[.500023007392883,.190576016902924],[.499976992607117,.954452991485596],[.366169989109039,.398822009563446],[.393207013607025,.39553701877594],[.410373002290726,.391080021858215],[.194993004202843,.342101991176605],[.388664990663528,.362284004688263],[.365961998701096,.355970978736877],[.343364000320435,.355356991291046],[.318785011768341,.35834002494812],[.301414996385574,.363156020641327],[.058132998645306,.319076001644135],[.301414996385574,.387449026107788],[.499987989664078,.618434011936188],[.415838003158569,.624195992946625],[.445681989192963,.566076993942261],[.465844005346298,.620640993118286],[.49992299079895,.351523995399475],[.288718998432159,.819945991039276],[.335278987884521,.852819979190826],[.440512001514435,.902418971061707],[.128294005990028,.791940987110138],[.408771991729736,.373893976211548],[.455606997013092,.451801002025604],[.499877005815506,.908990025520325],[.375436991453171,.924192011356354],[.11421000212431,.615022003650665],[.448662012815475,.695277988910675],[.4480200111866,.704632043838501],[.447111994028091,.715808033943176],[.444831997156143,.730794012546539],[.430011987686157,.766808986663818],[.406787008047104,.685672998428345],[.400738000869751,.681069016456604],[.392399996519089,.677703022956848],[.367855995893478,.663918972015381],[.247923001646996,.601333022117615],[.452769994735718,.420849978923798],[.43639200925827,.359887003898621],[.416164010763168,.368713974952698],[.413385987281799,.692366003990173],[.228018000721931,.683571994304657],[.468268007040024,.352671027183533],[.411361992359161,.804327011108398],[.499989002943039,.469825029373169],[.479153990745544,.442654013633728],[.499974012374878,.439637005329132],[.432112008333206,.493588984012604],[.499886006116867,.866917014122009],[.49991300702095,.821729004383087],[.456548988819122,.819200992584229],[.344549000263214,.745438992977142],[.37890899181366,.574010014533997],[.374292999505997,.780184984207153],[.319687992334366,.570737957954407],[.357154995203018,.604269981384277],[.295284003019333,.621580958366394],[.447750002145767,.862477004528046],[.410986006259918,.508723020553589],[.31395098567009,.775308012962341],[.354128003120422,.812552988529205],[.324548006057739,.703992962837219],[.189096003770828,.646299958229065],[.279776990413666,.71465802192688],[.1338230073452,.682700991630554],[.336768001317978,.644733011722565],[.429883986711502,.466521978378296],[.455527991056442,.548622965812683],[.437114000320435,.558896005153656],[.467287987470627,.529924988746643],[.414712011814117,.335219979286194],[.37704598903656,.322777986526489],[.344107985496521,.320150971412659],[.312875986099243,.3223320245
7428],[.283526003360748,.333190023899078],[.241245999932289,.382785975933075],[.102986000478268,.468762993812561],[.267612010240555,.424560010433197],[.297879010438919,.433175981044769],[.333433985710144,.433878004550934],[.366427004337311,.426115989685059],[.396012008190155,.416696012020111],[.420121014118195,.41022801399231],[.007561000064015,.480777025222778],[.432949006557465,.569517970085144],[.458638995885849,.479089021682739],[.473466008901596,.545744001865387],[.476087987422943,.563830018043518],[.468472003936768,.555056989192963],[.433990985155106,.582361996173859],[.483518004417419,.562983989715576],[.482482999563217,.57784903049469],[.42645001411438,.389798998832703],[.438998997211456,.39649498462677],[.450067013502121,.400434017181396],[.289712011814117,.368252992630005],[.276670008897781,.363372981548309],[.517862021923065,.471948027610779],[.710287988185883,.380764007568359],[.526226997375488,.573909997940063],[.895093023777008,.254140973091125],[.634069979190826,.409575998783112],[.661242008209229,.41302502155304],[.688880026340485,.409460008144379],[.725341975688934,.389131009578705],[.606630027294159,.40370500087738],[.654766023159027,.344011008739471],[.629905998706818,.346076011657715],[.680678009986877,.347265005111694],[.702096998691559,.353591024875641],[.75221198797226,.410804986953735],[.602918028831482,.842862963676453],[.719901978969574,.375599980354309],[.893692970275879,.399959981441498],[.790081977844238,.391354024410248],[.643998026847839,.534487962722778],[.528249025344849,.65040397644043],[.525849997997284,.680191040039062],[.560214996337891,.657229006290436],[.585384011268616,.66654098033905],[.549625992774963,.680860996246338],[.57122802734375,.682691991329193],[.624852001667023,.72809898853302],[.513050019741058,.547281980514526],[.51509702205658,.527251958847046],[.742246985435486,.314507007598877],[.598631024360657,.454979002475739],[.570338010787964,.548575043678284],[.578631997108459,.533622980117798],[.723087012767792,.532054007053375],[.516445994377136,.499638974666595],[.662801027297974,.282917976379395],[.70362401008606,.293271005153656],[.830704987049103,.193813979625702],[.552385985851288,.302568018436432],[.607609987258911,.353887975215912],[.645429015159607,.696707010269165],[.932694971561432,.730105042457581],[.557260990142822,.572826027870178],[.542901992797852,.584792017936707],[.6180260181427,.694710969924927],[.607590973377228,.694203019142151],[.722943007946014,.271963000297546],[.577413976192474,.563166975975037],[.614082992076874,.281386971473694],[.616907000541687,.255886018276215],[.668509006500244,.119913995265961],[.770092010498047,.232020974159241],[.635536015033722,.189248979091644],[.77039098739624,.299556016921997],[.826722025871277,.278755009174347],[.527121007442474,.666198015213013],[.553171992301941,.668527007102966],[.577238023281097,.673889994621277],[.554691970348358,.580065965652466],[.611896991729736,.693961024284363],[.59696102142334,.706539988517761],[.596370995044708,.693953037261963],[.539958000183105,.557139039039612],[.568841993808746,.692366003990173],[.547818005084991,.692366003990173],[.52461302280426,.692366003990173],[.534089982509613,.779141008853912],[.527670979499817,.736225962638855],[.526912987232208,.717857003211975],[.526877999305725,.704625964164734],[.526966989040375,.695277988910675],[.572058022022247,.695277988910675],[.573521018028259,.703539967536926],[.57683801651001,.711845993995667],[.581691026687622,.720062971115112],[.609944999217987,.639909982681274],[.986046016216278,.560034036636353],[.58
67999792099,.69539999961853],[.590372025966644,.701822996139526],[.531915009021759,.601536989212036],[.577268004417419,.585934996604919],[.536915004253387,.593786001205444],[.627542972564697,.473352015018463],[.665585994720459,.495950996875763],[.588353991508484,.546862006187439],[.757824003696442,.14767599105835],[.709249973297119,.201507985591888],[.672684013843536,.256581008434296],[.600408971309662,.74900496006012],[.55826598405838,.261672019958496],[.570303976535797,.187870979309082],[.588165998458862,.109044015407562],[.711045026779175,.398952007293701],[.781069993972778,.435405015945435],[.587247014045715,.398931980133057],[.742869973182678,.355445981025696],[.572156012058258,.437651991844177],[.55186802148819,.536570012569427],[.821442008018494,.457556009292603],[.752701997756958,.457181990146637],[.71375697851181,.467626988887787],[.66711300611496,.460672974586487],[.631101012229919,.447153985500336],[.6008620262146,.432473003864288],[.523481011390686,.405627012252808],[.810747981071472,.523926019668579],[.771045982837677,.348959028720856],[.509127020835876,.562718033790588],[.595292985439301,.485023975372314],[.980530977249146,.401564002037048],[.573499977588654,.420000016689301],[.602994978427887,.548687994480133],[.733529984951019,.376977026462555],[.560611009597778,.519016981124878],[.967685997486115,.644356966018677],[.580985009670258,.387160003185272],[.537728011608124,.505385041236877],[.760966002941132,.779752969741821],[.801778972148895,.831938028335571],[.892440974712372,.54076099395752],[.816350996494293,.740260004997253],[.865594983100891,.333687007427216],[.614073991775513,.883246004581451],[.508952975273132,.579437971115112],[.617941975593567,.508316040039062],[.825608015060425,.397674977779388],[.681214988231659,.39623498916626],[.656635999679565,.400596976280212],[.603900015354156,.710216999053955],[.81208598613739,.588539004325867],[.56801301240921,.944564998149872],[.681007981300354,.898285031318665],[.733752012252808,.869701027870178],[.633830010890961,.398822009563446],[.606792986392975,.39553701877594],[.589659988880157,.391062021255493],[.805015981197357,.342108011245728],[.611334979534149,.362284004688263],[.634037971496582,.355970978736877],[.656635999679565,.355356991291046],[.681214988231659,.35834002494812],[.698584973812103,.363156020641327],[.941866993904114,.319076001644135],[.698584973812103,.387449026107788],[.584177017211914,.624107003211975],[.554318010807037,.566076993942261],[.534153997898102,.62064003944397],[.711217999458313,.819975018501282],[.664629995822906,.852871000766754],[.559099972248077,.902631998062134],[.871706008911133,.791940987110138],[.591234028339386,.373893976211548],[.544341027736664,.451583981513977],[.624562978744507,.924192011356354],[.88577002286911,.615028977394104],[.551338016986847,.695277988910675],[.551980018615723,.704632043838501],[.552887976169586,.715808033943176],[.555167973041534,.730794012546539],[.569944024085999,.767035007476807],[.593203008174896,.685675978660583],[.599261999130249,.681069016456604],[.607599973678589,.677703022956848],[.631937980651855,.663500010967255],[.752032995223999,.601315021514893],[.547226011753082,.420395016670227],[.563543975353241,.359827995300293],[.583841025829315,.368713974952698],[.586614012718201,.692366003990173],[.771915018558502,.683578014373779],[.531597018241882,.352482974529266],[.588370978832245,.804440975189209],[.52079701423645,.442565023899078],[.567984998226166,.493479013442993],[.543282985687256,.819254994392395],[.655317008495331,.745514988899231],[.6210089921951
29,.574018001556396],[.625559985637665,.78031200170517],[.680198013782501,.570719003677368],[.64276397228241,.604337990283966],[.704662978649139,.621529996395111],[.552012026309967,.862591981887817],[.589071989059448,.508637011051178],[.685944974422455,.775357007980347],[.645735025405884,.812640011310577],[.675342977046967,.703978002071381],[.810858011245728,.646304965019226],[.72012197971344,.714666962623596],[.866151988506317,.682704985141754],[.663187026977539,.644596993923187],[.570082008838654,.466325998306274],[.544561982154846,.548375964164734],[.562758982181549,.558784961700439],[.531987011432648,.530140042304993],[.585271000862122,.335177004337311],[.622952997684479,.32277899980545],[.655896008014679,.320163011550903],[.687132000923157,.322345972061157],[.716481983661652,.333200991153717],[.758756995201111,.382786989212036],[.897013008594513,.468769013881683],[.732392013072968,.424547016620636],[.70211398601532,.433162987232208],[.66652500629425,.433866024017334],[.633504986763,.426087975502014],[.603875994682312,.416586995124817],[.579657971858978,.409945011138916],[.992439985275269,.480777025222778],[.567192018032074,.569419980049133],[.54136598110199,.478899002075195],[.526564002037048,.546118021011353],[.523913025856018,.563830018043518],[.531529009342194,.555056989192963],[.566035985946655,.582329034805298],[.51631098985672,.563053965568542],[.5174720287323,.577877044677734],[.573594987392426,.389806985855103],[.560697972774506,.395331978797913],[.549755990505219,.399751007556915],[.710287988185883,.368252992630005],[.723330020904541,.363372981548309]],K0=[127,34,139,11,0,37,232,231,120,72,37,39,128,121,47,232,121,128,104,69,67,175,171,148,157,154,155,118,50,101,73,39,40,9,151,108,48,115,131,194,204,211,74,40,185,80,42,183,40,92,186,230,229,118,202,212,214,83,18,17,76,61,146,160,29,30,56,157,173,106,204,194,135,214,192,203,165,98,21,71,68,51,45,4,144,24,23,77,146,91,205,50,187,201,200,18,91,106,182,90,91,181,85,84,17,206,203,36,148,171,140,92,40,39,193,189,244,159,158,28,247,246,161,236,3,196,54,68,104,193,168,8,117,228,31,189,193,55,98,97,99,126,47,100,166,79,218,155,154,26,209,49,131,135,136,150,47,126,217,223,52,53,45,51,134,211,170,140,67,69,108,43,106,91,230,119,120,226,130,247,63,53,52,238,20,242,46,70,156,78,62,96,46,53,63,143,34,227,173,155,133,123,117,111,44,125,19,236,134,51,216,206,205,154,153,22,39,37,167,200,201,208,36,142,100,57,212,202,20,60,99,28,158,157,35,226,113,160,159,27,204,202,210,113,225,46,43,202,204,62,76,77,137,123,116,41,38,72,203,129,142,64,98,240,49,102,64,41,73,74,212,216,207,42,74,184,169,170,211,170,149,176,105,66,69,122,6,168,123,147,187,96,77,90,65,55,107,89,90,180,101,100,120,63,105,104,93,137,227,15,86,85,129,102,49,14,87,86,55,8,9,100,47,121,145,23,22,88,89,179,6,122,196,88,95,96,138,172,136,215,58,172,115,48,219,42,80,81,195,3,51,43,146,61,171,175,199,81,82,38,53,46,225,144,163,110,246,33,7,52,65,66,229,228,117,34,127,234,107,108,69,109,108,151,48,64,235,62,78,191,129,209,126,111,35,143,163,161,246,117,123,50,222,65,52,19,125,141,221,55,65,3,195,197,25,7,33,220,237,44,70,71,139,122,193,245,247,130,33,71,21,162,153,158,159,170,169,150,188,174,196,216,186,92,144,160,161,2,97,167,141,125,241,164,167,37,72,38,12,145,159,160,38,82,13,63,68,71,226,35,111,158,153,154,101,50,205,206,92,165,209,198,217,165,167,97,220,115,218,133,112,243,239,238,241,214,135,169,190,173,133,171,208,32,125,44,237,86,87,178,85,86,179,84,85,180,83,84,181,201,83,182,137,93,132,76,62,183,61,76,184,57,61,185,212,57,186,214,207,187,34,143,156,79,239,237,123,137,177,44,1,4,
201,194,32,64,102,129,213,215,138,59,166,219,242,99,97,2,94,141,75,59,235,24,110,228,25,130,226,23,24,229,22,23,230,26,22,231,112,26,232,189,190,243,221,56,190,28,56,221,27,28,222,29,27,223,30,29,224,247,30,225,238,79,20,166,59,75,60,75,240,147,177,215,20,79,166,187,147,213,112,233,244,233,128,245,128,114,188,114,217,174,131,115,220,217,198,236,198,131,134,177,132,58,143,35,124,110,163,7,228,110,25,356,389,368,11,302,267,452,350,349,302,303,269,357,343,277,452,453,357,333,332,297,175,152,377,384,398,382,347,348,330,303,304,270,9,336,337,278,279,360,418,262,431,304,408,409,310,415,407,270,409,410,450,348,347,422,430,434,313,314,17,306,307,375,387,388,260,286,414,398,335,406,418,364,367,416,423,358,327,251,284,298,281,5,4,373,374,253,307,320,321,425,427,411,421,313,18,321,405,406,320,404,405,315,16,17,426,425,266,377,400,369,322,391,269,417,465,464,386,257,258,466,260,388,456,399,419,284,332,333,417,285,8,346,340,261,413,441,285,327,460,328,355,371,329,392,439,438,382,341,256,429,420,360,364,394,379,277,343,437,443,444,283,275,440,363,431,262,369,297,338,337,273,375,321,450,451,349,446,342,467,293,334,282,458,461,462,276,353,383,308,324,325,276,300,293,372,345,447,382,398,362,352,345,340,274,1,19,456,248,281,436,427,425,381,256,252,269,391,393,200,199,428,266,330,329,287,273,422,250,462,328,258,286,384,265,353,342,387,259,257,424,431,430,342,353,276,273,335,424,292,325,307,366,447,345,271,303,302,423,266,371,294,455,460,279,278,294,271,272,304,432,434,427,272,407,408,394,430,431,395,369,400,334,333,299,351,417,168,352,280,411,325,319,320,295,296,336,319,403,404,330,348,349,293,298,333,323,454,447,15,16,315,358,429,279,14,15,316,285,336,9,329,349,350,374,380,252,318,402,403,6,197,419,318,319,325,367,364,365,435,367,397,344,438,439,272,271,311,195,5,281,273,287,291,396,428,199,311,271,268,283,444,445,373,254,339,263,466,249,282,334,296,449,347,346,264,447,454,336,296,299,338,10,151,278,439,455,292,407,415,358,371,355,340,345,372,390,249,466,346,347,280,442,443,282,19,94,370,441,442,295,248,419,197,263,255,359,440,275,274,300,383,368,351,412,465,263,467,466,301,368,389,380,374,386,395,378,379,412,351,419,436,426,322,373,390,388,2,164,393,370,462,461,164,0,267,302,11,12,374,373,387,268,12,13,293,300,301,446,261,340,385,384,381,330,266,425,426,423,391,429,355,437,391,327,326,440,457,438,341,382,362,459,457,461,434,430,394,414,463,362,396,369,262,354,461,457,316,403,402,315,404,403,314,405,404,313,406,405,421,418,406,366,401,361,306,408,407,291,409,408,287,410,409,432,436,410,434,416,411,264,368,383,309,438,457,352,376,401,274,275,4,421,428,262,294,327,358,433,416,367,289,455,439,462,370,326,2,326,370,305,460,455,254,449,448,255,261,446,253,450,449,252,451,450,256,452,451,341,453,452,413,464,463,441,413,414,258,442,441,257,443,442,259,444,443,260,445,444,467,342,445,459,458,250,289,392,290,290,328,460,376,433,435,250,290,392,411,416,433,341,463,464,453,464,465,357,465,412,343,412,399,360,363,440,437,399,456,420,456,363,401,435,288,372,383,353,339,255,249,448,261,255,133,243,190,133,155,112,33,246,247,33,130,25,398,384,286,362,398,414,362,463,341,263,359,467,263,249,255,466,467,260,75,60,166,238,239,79,162,127,139,72,11,37,121,232,120,73,72,39,114,128,47,233,232,128,103,104,67,152,175,148,173,157,155,119,118,101,74,73,40,107,9,108,49,48,131,32,194,211,184,74,185,191,80,183,185,40,186,119,230,118,210,202,214,84,83,17,77,76,146,161,160,30,190,56,173,182,106,194,138,135,192,129,203,98,54,21,68,5,51,4,145,144,23,90,77,91,207,205,187,83,201,18,181,91,182,180,90,181,16,85,17,205,206,36,176,148,140,165,92,
39,245,193,244,27,159,28,30,247,161,174,236,196,103,54,104,55,193,8,111,117,31,221,189,55,240,98,99,142,126,100,219,166,218,112,155,26,198,209,131,169,135,150,114,47,217,224,223,53,220,45,134,32,211,140,109,67,108,146,43,91,231,230,120,113,226,247,105,63,52,241,238,242,124,46,156,95,78,96,70,46,63,116,143,227,116,123,111,1,44,19,3,236,51,207,216,205,26,154,22,165,39,167,199,200,208,101,36,100,43,57,202,242,20,99,56,28,157,124,35,113,29,160,27,211,204,210,124,113,46,106,43,204,96,62,77,227,137,116,73,41,72,36,203,142,235,64,240,48,49,64,42,41,74,214,212,207,183,42,184,210,169,211,140,170,176,104,105,69,193,122,168,50,123,187,89,96,90,66,65,107,179,89,180,119,101,120,68,63,104,234,93,227,16,15,85,209,129,49,15,14,86,107,55,9,120,100,121,153,145,22,178,88,179,197,6,196,89,88,96,135,138,136,138,215,172,218,115,219,41,42,81,5,195,51,57,43,61,208,171,199,41,81,38,224,53,225,24,144,110,105,52,66,118,229,117,227,34,234,66,107,69,10,109,151,219,48,235,183,62,191,142,129,126,116,111,143,7,163,246,118,117,50,223,222,52,94,19,141,222,221,65,196,3,197,45,220,44,156,70,139,188,122,245,139,71,162,145,153,159,149,170,150,122,188,196,206,216,92,163,144,161,164,2,167,242,141,241,0,164,37,11,72,12,144,145,160,12,38,13,70,63,71,31,226,111,157,158,154,36,101,205,203,206,165,126,209,217,98,165,97,237,220,218,237,239,241,210,214,169,140,171,32,241,125,237,179,86,178,180,85,179,181,84,180,182,83,181,194,201,182,177,137,132,184,76,183,185,61,184,186,57,185,216,212,186,192,214,187,139,34,156,218,79,237,147,123,177,45,44,4,208,201,32,98,64,129,192,213,138,235,59,219,141,242,97,97,2,141,240,75,235,229,24,228,31,25,226,230,23,229,231,22,230,232,26,231,233,112,232,244,189,243,189,221,190,222,28,221,223,27,222,224,29,223,225,30,224,113,247,225,99,60,240,213,147,215,60,20,166,192,187,213,243,112,244,244,233,245,245,128,188,188,114,174,134,131,220,174,217,236,236,198,134,215,177,58,156,143,124,25,110,7,31,228,25,264,356,368,0,11,267,451,452,349,267,302,269,350,357,277,350,452,357,299,333,297,396,175,377,381,384,382,280,347,330,269,303,270,151,9,337,344,278,360,424,418,431,270,304,409,272,310,407,322,270,410,449,450,347,432,422,434,18,313,17,291,306,375,259,387,260,424,335,418,434,364,416,391,423,327,301,251,298,275,281,4,254,373,253,375,307,321,280,425,411,200,421,18,335,321,406,321,320,405,314,315,17,423,426,266,396,377,369,270,322,269,413,417,464,385,386,258,248,456,419,298,284,333,168,417,8,448,346,261,417,413,285,326,327,328,277,355,329,309,392,438,381,382,256,279,429,360,365,364,379,355,277,437,282,443,283,281,275,363,395,431,369,299,297,337,335,273,321,348,450,349,359,446,467,283,293,282,250,458,462,300,276,383,292,308,325,283,276,293,264,372,447,346,352,340,354,274,19,363,456,281,426,436,425,380,381,252,267,269,393,421,200,428,371,266,329,432,287,422,290,250,328,385,258,384,446,265,342,386,387,257,422,424,430,445,342,276,422,273,424,306,292,307,352,366,345,268,271,302,358,423,371,327,294,460,331,279,294,303,271,304,436,432,427,304,272,408,395,394,431,378,395,400,296,334,299,6,351,168,376,352,411,307,325,320,285,295,336,320,319,404,329,330,349,334,293,333,366,323,447,316,15,315,331,358,279,317,14,316,8,285,9,277,329,350,253,374,252,319,318,403,351,6,419,324,318,325,397,367,365,288,435,397,278,344,439,310,272,311,248,195,281,375,273,291,175,396,199,312,311,268,276,283,445,390,373,339,295,282,296,448,449,346,356,264,454,337,336,299,337,338,151,294,278,455,308,292,415,429,358,355,265,340,372,388,390,466,352,346,280,295,442,282,354,19,370,285,441,295,195,248,197,457,440,274,301,300,368,417,351,465,251,301,389,385,380,386
,394,395,379,399,412,419,410,436,322,387,373,388,326,2,393,354,370,461,393,164,267,268,302,12,386,374,387,312,268,13,298,293,301,265,446,340,380,385,381,280,330,425,322,426,391,420,429,437,393,391,326,344,440,438,458,459,461,364,434,394,428,396,262,274,354,457,317,316,402,316,315,403,315,314,404,314,313,405,313,421,406,323,366,361,292,306,407,306,291,408,291,287,409,287,432,410,427,434,411,372,264,383,459,309,457,366,352,401,1,274,4,418,421,262,331,294,358,435,433,367,392,289,439,328,462,326,94,2,370,289,305,455,339,254,448,359,255,446,254,253,449,253,252,450,252,256,451,256,341,452,414,413,463,286,441,414,286,258,441,258,257,442,257,259,443,259,260,444,260,467,445,309,459,250,305,289,290,305,290,460,401,376,435,309,250,392,376,411,433,453,341,464,357,453,465,343,357,412,437,343,399,344,360,440,420,437,456,360,420,363,361,401,288,265,372,353,390,339,249,339,448,255];var wr=[127,234,132,58,172,150,149,148,152,377,378,379,397,288,361,454,356,70,63,105,66,107,336,296,334,293,300,168,6,195,4,98,97,2,326,327,33,160,158,133,153,144,362,385,387,263,373,380,57,40,37,0,267,270,287,321,314,17,84,91,78,81,13,311,308,402,14,178],kr=[33,133,362,263,1,62,308,159,145,386,374,6,102,331,2,13,14,70,105,107,336,334,300,54,10,284,50,280,234,454,58,288,152],Er=[33,133,362,263,1,78,308],vs=wr.map(e=>Ce[e]),Ts=kr.map(e=>Ce[e]),Rs=Er.map(e=>Ce[e]);var le=e=>[Math.abs(e.endPoint[0]-e.startPoint[0]),Math.abs(e.endPoint[1]-e.startPoint[1])],Ye=e=>[e.startPoint[0]+(e.endPoint[0]-e.startPoint[0])/2,e.startPoint[1]+(e.endPoint[1]-e.startPoint[1])/2,1],Jt=(e,t)=>e?[Math.trunc(Math.max(0,e.startPoint[0])),Math.trunc(Math.max(0,e.startPoint[1])),Math.trunc(Math.min(t.shape[2]||0,e.endPoint[0])-Math.max(0,e.startPoint[0])),Math.trunc(Math.min(t.shape[1]||0,e.endPoint[1])-Math.max(0,e.startPoint[1]))]:[0,0,0,0],Yt=(e,t)=>e?[e.startPoint[0]/(t.shape[2]||0),e.startPoint[1]/(t.shape[1]||0),(e.endPoint[0]-e.startPoint[0])/(t.shape[2]||0),(e.endPoint[1]-e.startPoint[1])/(t.shape[1]||0)]:[0,0,0,0],Xo=(e,t)=>{let o=[e.startPoint[0]*t[0],e.startPoint[1]*t[1]],A=[e.endPoint[0]*t[0],e.endPoint[1]*t[1]];return{startPoint:o,endPoint:A,landmarks:e.landmarks,confidence:e.confidence}},qt=(e,t,o)=>{let A=t.shape[1],n=t.shape[2],s=[e.startPoint[1]/A,e.startPoint[0]/n,e.endPoint[1]/A,e.endPoint[0]/n],a=r.image.cropAndResize(t,[s],[0],o),i=r.div(a,L.tf255);return r.dispose(a),i},Ke=(e,t)=>{let o=Ye(e),A=le(e),n=[t*A[0]/2,t*A[1]/2];return{startPoint:[o[0]-n[0],o[1]-n[1]],endPoint:[o[0]+n[0],o[1]+n[1]],landmarks:e.landmarks,confidence:e.confidence}},Qe=e=>{let t=Ye(e),o=le(e),A=Math.max(...o)/2;return{startPoint:[Math.round(t[0]-A),Math.round(t[1]-A)],endPoint:[Math.round(t[0]+A),Math.round(t[1]+A)],landmarks:e.landmarks,confidence:e.confidence}},Do=e=>{let t=e.map(A=>A[0]),o=e.map(A=>A[1]);return{startPoint:[Math.min(...t),Math.min(...o)],endPoint:[Math.max(...t),Math.max(...o)],landmarks:e}},Ut=[[1,0,0],[0,1,0],[0,0,1]],zr=e=>e-2*Math.PI*Math.floor((e+Math.PI)/(2*Math.PI)),jr=(e,t)=>zr(Math.PI/2-Math.atan2(-(t[1]-e[1]),t[0]-e[0]));var Vo=(e,t)=>[[1,0,e],[0,1,t],[0,0,1]],Q0=(e,t)=>{let o=0;for(let A=0;A{let o=[];for(let A=0;A{let o=[],A=e.length;for(let n=0;n{let o=Math.cos(e),A=Math.sin(e),n=[[o,-A,0],[A,o,0],[0,0,1]],s=Vo(t[0],t[1]),a=Zo(s,n),i=Vo(-t[0],-t[1]);return Zo(a,i)},Wr=e=>{let t=[[e[0][0],e[1][0]],[e[0][1],e[1][1]]],o=[e[0][2],e[1][2]],A=[-Q0(t[0],o),-Q0(t[1],o)];return[t[0].concat(A[0]),t[1].concat(A[1]),[0,0,1]]},Cr=(e,t)=>[Q0(e,t[0]),Q0(e,t[1])];function Uo(e){let t={strides:[e/16,e/8],anchors:[2,6]},o=[];for(let 
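// The minified helpers just above treat a face box as a {startPoint, endPoint}
// pair: one helper rescales the box around its center, another squarifies it to
// its longest side before cropping. A hedged standalone sketch of that geometry
// (function and parameter names invented for illustration):
function exampleScaleAndSquarifyBox(box, factor) {
  const center = [(box.startPoint[0] + box.endPoint[0]) / 2, (box.startPoint[1] + box.endPoint[1]) / 2];
  const size = [Math.abs(box.endPoint[0] - box.startPoint[0]), Math.abs(box.endPoint[1] - box.startPoint[1])];
  const half = (factor * Math.max(...size)) / 2; // longest side wins, so the result is square
  return {
    startPoint: [Math.round(center[0] - half), Math.round(center[1] - half)],
    endPoint: [Math.round(center[0] + half), Math.round(center[1] + half)],
  };
}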
A=0;A[s[0]/n*(f[0]-n/2),s[1]/n*(f[1]-n/2),f[2]||0]),i=o&&o!==0&&Math.abs(o)>.2,x=i?qo(o,[0,0]):Ut,d=i?a.map(f=>[...Cr(f,x),f[2]]):a,l=i?Wr(A):Ut,y=Ye(t),c=[Q0(y,l[0]),Q0(y,l[1])];return d.map(f=>[Math.trunc(f[0]+c[0]),Math.trunc(f[1]+c[1]),Math.trunc(f[2]||0)])}function Yo(e,t,o,A){let n=t.landmarks.length>=Zt.count?Zt.symmetryLine:We.symmetryLine,s=0,a=Ut,i;if(e&&R.kernels.includes("rotatewithoffset"))if(s=jr(t.landmarks[n[0]],t.landmarks[n[1]]),s&&s!==0&&Math.abs(s)>.2){let d=Ye(t),l=[d[0]/o.shape[2],d[1]/o.shape[1]],y=r.image.rotateWithOffset(o,s,0,l);a=qo(-s,d),i=qt(t,y,[A,A]),r.dispose(y)}else i=qt(t,o,[A,A]);else i=qt(t,o,[A,A]);return[s,a,i]}var Ir=e=>{let t=e.map(A=>A[0]),o=e.map(A=>A[1]);return[Math.min(...t)+(Math.max(...t)-Math.min(...t))/2,Math.min(...o)+(Math.max(...o)-Math.min(...o))/2]},Ko=(e,t)=>{let o=Ir(e),A=le(t);return{startPoint:[o[0]-A[0]/2,o[1]-A[1]/2],endPoint:[o[0]+A[0]/2,o[1]+A[1]/2]}};var Qo=6,Or=1.2,T0,_o=null,H0=0,Ie=null,_e=()=>H0;async function $o(e){var t;return R.initial&&(T0=null),T0?e.debug&&g("cached model:",T0.modelUrl):T0=await N((t=e.face.detector)==null?void 0:t.modelPath),H0=T0.inputs[0].shape?T0.inputs[0].shape[2]:0,Ie=r.scalar(H0,"int32"),_o=r.tensor2d(Uo(H0)),T0}function Nr(e){let t={};t.boxStarts=r.slice(e,[0,1],[-1,2]),t.centers=r.add(t.boxStarts,_o),t.boxSizes=r.slice(e,[0,3],[-1,2]),t.boxSizesNormalized=r.div(t.boxSizes,Ie),t.centersNormalized=r.div(t.centers,Ie),t.halfBoxSize=r.div(t.boxSizesNormalized,L.tf2),t.starts=r.sub(t.centersNormalized,t.halfBoxSize),t.ends=r.add(t.centersNormalized,t.halfBoxSize),t.startNormalized=r.mul(t.starts,Ie),t.endNormalized=r.mul(t.ends,Ie);let o=r.concat2d([t.startNormalized,t.endNormalized],1);return Object.keys(t).forEach(A=>r.dispose(t[A])),o}async function eA(e,t){var i,x,d,l;if(!e||e.isDisposedInternal||e.shape.length!==4||e.shape[1]<1||e.shape[2]<1)return[];let o={};o.resized=r.image.resizeBilinear(e,[H0,H0]),o.div=r.div(o.resized,L.tf127),o.normalized=r.sub(o.div,L.tf05);let A=T0==null?void 0:T0.execute(o.normalized);if(Array.isArray(A)){let y=A.sort((c,f)=>c.size-f.size);o.concat384=r.concat([y[0],y[2]],2),o.concat512=r.concat([y[1],y[3]],2),o.concat=r.concat([o.concat512,o.concat384],1),o.batch=r.squeeze(o.concat,0)}else o.batch=r.squeeze(A);r.dispose(A),o.boxes=Nr(o.batch),o.logits=r.slice(o.batch,[0,0],[-1,1]),o.sigmoid=r.sigmoid(o.logits),o.scores=r.squeeze(o.sigmoid),o.nms=await r.image.nonMaxSuppressionAsync(o.boxes,o.scores,((i=t.face.detector)==null?void 0:i.maxDetected)||0,((x=t.face.detector)==null?void 0:x.iouThreshold)||0,((d=t.face.detector)==null?void 0:d.minConfidence)||0);let n=await o.nms.array(),s=[],a=await o.scores.data();for(let y=0;y(((l=t.face.detector)==null?void 0:l.minConfidence)||0)){let f={};f.bbox=r.slice(o.boxes,[n[y],0],[1,-1]),f.slice=r.slice(o.batch,[n[y],Qo-1],[1,-1]),f.squeeze=r.squeeze(f.slice),f.landmarks=r.reshape(f.squeeze,[Qo,-1]);let h=await f.bbox.data(),m={startPoint:[h[0],h[1]],endPoint:[h[2],h[3]],landmarks:await f.landmarks.array(),confidence:c},v=Xo(m,[(e.shape[2]||0)/H0,(e.shape[1]||0)/H0]),b=Ke(v,t.face.scale||Or),P=Qe(b);s.push(P),Object.keys(f).forEach(p=>r.dispose(f[p]))}}return Object.keys(o).forEach(y=>r.dispose(o[y])),s}var $e={};Ze($e,{connected:()=>_t,kpt:()=>Qt});var 
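// The face detector decode above (`Nr`) turns raw anchor-relative predictions
// into corner boxes: add the per-cell anchor to the predicted center offset,
// then expand by half the predicted width/height. A sketch of that decode for a
// single prediction, using plain arrays instead of tensors (layout assumed from
// the slices above; names invented):
function exampleDecodeAnchorBox(pred, anchor) {
  // pred: [score, centerOffsetX, centerOffsetY, width, height, ...landmarks]
  const cx = pred[1] + anchor[0]; // anchor is the grid-cell center in input pixels
  const cy = pred[2] + anchor[1];
  return [cx - pred[3] / 2, cy - pred[4] / 2, cx + pred[3] / 2, cy + pred[4] / 2];
}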
Qt=["nose","leftEyeInside","leftEye","leftEyeOutside","rightEyeInside","rightEye","rightEyeOutside","leftEar","rightEar","leftMouth","rightMouth","leftShoulder","rightShoulder","leftElbow","rightElbow","leftWrist","rightWrist","leftPinky","rightPinky","leftIndex","rightIndex","leftThumb","rightThumb","leftHip","rightHip","leftKnee","rightKnee","leftAnkle","rightAnkle","leftHeel","rightHeel","leftFoot","rightFoot","bodyCenter","bodyTop","leftPalm","leftHand","rightPalm","rightHand"],_t={shoulders:["leftShoulder","rightShoulder"],hips:["rightHip","leftHip"],mouth:["leftMouth","rightMouth"],leftLegUpper:["leftHip","leftKnee"],leftLegLower:["leftKnee","leftAnkle"],leftFoot:["leftAnkle","leftHeel","leftFoot"],leftTorso:["leftShoulder","leftHip"],leftArmUpper:["leftShoulder","leftElbow"],leftArmLower:["leftElbow","leftWrist"],leftHand:["leftWrist","leftPalm"],leftHandPinky:["leftPalm","leftPinky"],leftHandIndex:["leftPalm","leftIndex"],leftHandThumb:["leftPalm","leftThumb"],leftEyeOutline:["leftEyeInside","leftEyeOutside"],rightLegUpper:["rightHip","rightKnee"],rightLegLower:["rightKnee","rightAnkle"],rightFoot:["rightAnkle","rightHeel","rightFoot"],rightTorso:["rightShoulder","rightHip"],rightArmUpper:["rightShoulder","rightElbow"],rightArmLower:["rightElbow","rightWrist"],rightHand:["rightWrist","rightPalm"],rightHandPinky:["rightPalm","rightPinky"],rightHandIndex:["rightPalm","rightIndex"],rightHandThumb:["rightPalm","rightThumb"],rightEyeOutline:["rightEyeInside","rightEyeOutside"]};var oA=224,Lr,Br=5,et=[8,16,32,32,32];async function AA(){let e=[],t=0;for(;to.x)),y:r.tensor1d(e.map(o=>o.y))}}function S0(e,t=[1,1]){let o=[e.map(i=>i[0]),e.map(i=>i[1])],A=[Math.min(...o[0]),Math.min(...o[1])],n=[Math.max(...o[0]),Math.max(...o[1])],s=[A[0],A[1],n[0]-A[0],n[1]-A[1]],a=[s[0]/t[0],s[1]/t[1],s[2]/t[0],s[3]/t[1]];return{box:s,boxRaw:a}}function nA(e,t=[1,1]){let o=[e.map(d=>d[0]),e.map(d=>d[1])],A=[Math.min(...o[0]),Math.min(...o[1])],n=[Math.max(...o[0]),Math.max(...o[1])],s=[(A[0]+n[0])/2,(A[1]+n[1])/2],a=Math.max(s[0]-A[0],s[1]-A[1],-s[0]+n[0],-s[1]+n[1]),i=[Math.trunc(s[0]-a),Math.trunc(s[1]-a),Math.trunc(2*a),Math.trunc(2*a)],x=[i[0]/t[0],i[1]/t[1],i[2]/t[0],i[3]/t[1]];return{box:i,boxRaw:x}}function tt(e,t){let o=[e[2]*t,e[3]*t];return[e[0]-(o[0]-e[2])/2,e[1]-(o[1]-e[3])/2,o[0],o[1]]}var aA={initial:!0},y0={detector:null,landmarks:null},ye={detector:[224,224],landmarks:[256,256]},$t=Number.MAX_SAFE_INTEGER,Hr={landmarks:["ld_3d","activation_segmentation","activation_heatmap","world_3d","output_poseflag"],detector:[]},At=null,Oe,F0=[[0,0],[0,0],[0,0],[0,0]],rA=0,sA=e=>1-1/(1+Math.exp(e));async function iA(e){if(aA.initial&&(y0.detector=null),!y0.detector&&e.body.detector&&e.body.detector.modelPath){y0.detector=await N(e.body.detector.modelPath);let t=Object.values(y0.detector.modelSignature.inputs);ye.detector[0]=Array.isArray(t)?parseInt(t[0].tensorShape.dim[1].size):0,ye.detector[1]=Array.isArray(t)?parseInt(t[0].tensorShape.dim[2].size):0}else e.debug&&y0.detector&&g("cached model:",y0.detector.modelUrl);return await AA(),y0.detector}async function lA(e){if(aA.initial&&(y0.landmarks=null),y0.landmarks)e.debug&&g("cached model:",y0.landmarks.modelUrl);else{y0.landmarks=await N(e.body.modelPath);let t=Object.values(y0.landmarks.modelSignature.inputs);ye.landmarks[0]=Array.isArray(t)?parseInt(t[0].tensorShape.dim[1].size):0,ye.landmarks[1]=Array.isArray(t)?parseInt(t[0].tensorShape.dim[2].size):0}return y0.landmarks}async function Fr(e,t){let o={};if(!e.shape||!e.shape[1]||!e.shape[2])return 
e;let A;if(Oe&&(o.cropped=r.image.cropAndResize(e,[Oe],[0],[e.shape[1],e.shape[2]])),e.shape[1]!==e.shape[2]){let n=[e.shape[2]>e.shape[1]?Math.trunc((e.shape[2]-e.shape[1])/2):0,e.shape[2]>e.shape[1]?Math.trunc((e.shape[2]-e.shape[1])/2):0],s=[e.shape[1]>e.shape[2]?Math.trunc((e.shape[1]-e.shape[2])/2):0,e.shape[1]>e.shape[2]?Math.trunc((e.shape[1]-e.shape[2])/2):0];F0=[[0,0],n,s,[0,0]],o.pad=r.pad(o.cropped||e,F0),o.resize=r.image.resizeBilinear(o.pad,[t,t]),A=r.div(o.resize,L.tf255)}else e.shape[1]!==t?(o.resize=r.image.resizeBilinear(o.cropped||e,[t,t]),A=r.div(o.resize,L.tf255)):A=r.div(o.cropped||e,L.tf255);return Object.keys(o).forEach(n=>r.dispose(o[n])),A}function Vr(e,t){for(let o of e)o.position=[Math.trunc(o.position[0]*(t[0]+F0[2][0]+F0[2][1])/t[0]-F0[2][0]),Math.trunc(o.position[1]*(t[1]+F0[1][0]+F0[1][1])/t[1]-F0[1][0]),o.position[2]],o.positionRaw=[o.position[0]/t[0],o.position[1]/t[1],2*o.position[2]/(t[0]+t[1])];if(Oe)for(let o of e)o.positionRaw=[o.positionRaw[0]+Oe[1],o.positionRaw[1]+Oe[0],o.positionRaw[2]],o.position=[Math.trunc(o.positionRaw[0]*t[0]),Math.trunc(o.positionRaw[1]*t[1]),o.positionRaw[2]];return e}async function Zr(e){let t=e.find(i=>i.part==="leftPalm"),o=e.find(i=>i.part==="leftWrist"),A=e.find(i=>i.part==="leftIndex");t.position[2]=((o.position[2]||0)+(A.position[2]||0))/2;let n=e.find(i=>i.part==="rightPalm"),s=e.find(i=>i.part==="rightWrist"),a=e.find(i=>i.part==="rightIndex");n.position[2]=((s.position[2]||0)+(a.position[2]||0))/2}async function Xr(e,t,o){var h;let A={};[A.ld,A.segmentation,A.heatmap,A.world,A.poseflag]=(h=y0.landmarks)==null?void 0:h.execute(e,Hr.landmarks);let n=(await A.poseflag.data())[0],s=await A.ld.data(),a=await A.world.data();Object.keys(A).forEach(m=>r.dispose(A[m]));let i=[],x=5;for(let m=0;mm.position),y=S0(l,[o[0],o[1]]),c={};for(let[m,v]of Object.entries(_t)){let b=[];for(let P=0;PE.part===v[P]),u=d.find(E=>E.part===v[P+1]);p&&u&&b.push([p.position,u.position])}c[m]=b}return{id:0,score:Math.trunc(100*n)/100,box:y.box,boxRaw:y.boxRaw,keypoints:d,annotations:c}}async function e5(e,t){let o=[e.shape[2]||0,e.shape[1]||0],A=(t.body.skipTime||0)>M()-rA,n=$t<(t.body.skipFrames||0);if(t.skipAllowed&&A&&n&&At!==null)$t++;else{let s={};s.landmarks=await Fr(e,256),At=await Xr(s.landmarks,t,o),Object.keys(s).forEach(a=>r.dispose(s[a])),rA=M(),$t=0}return At?[At]:[]}var xe=[{class:1,label:"person"},{class:2,label:"bicycle"},{class:3,label:"car"},{class:4,label:"motorcycle"},{class:5,label:"airplane"},{class:6,label:"bus"},{class:7,label:"train"},{class:8,label:"truck"},{class:9,label:"boat"},{class:10,label:"traffic light"},{class:11,label:"fire hydrant"},{class:12,label:"stop sign"},{class:13,label:"parking meter"},{class:14,label:"bench"},{class:15,label:"bird"},{class:16,label:"cat"},{class:17,label:"dog"},{class:18,label:"horse"},{class:19,label:"sheep"},{class:20,label:"cow"},{class:21,label:"elephant"},{class:22,label:"bear"},{class:23,label:"zebra"},{class:24,label:"giraffe"},{class:25,label:"backpack"},{class:26,label:"umbrella"},{class:27,label:"handbag"},{class:28,label:"tie"},{class:29,label:"suitcase"},{class:30,label:"frisbee"},{class:31,label:"skis"},{class:32,label:"snowboard"},{class:33,label:"sports ball"},{class:34,label:"kite"},{class:35,label:"baseball bat"},{class:36,label:"baseball glove"},{class:37,label:"skateboard"},{class:38,label:"surfboard"},{class:39,label:"tennis racket"},{class:40,label:"bottle"},{class:41,label:"wine 
glass"},{class:42,label:"cup"},{class:43,label:"fork"},{class:44,label:"knife"},{class:45,label:"spoon"},{class:46,label:"bowl"},{class:47,label:"banana"},{class:48,label:"apple"},{class:49,label:"sandwich"},{class:50,label:"orange"},{class:51,label:"broccoli"},{class:52,label:"carrot"},{class:53,label:"hot dog"},{class:54,label:"pizza"},{class:55,label:"donut"},{class:56,label:"cake"},{class:57,label:"chair"},{class:58,label:"couch"},{class:59,label:"potted plant"},{class:60,label:"bed"},{class:61,label:"dining table"},{class:62,label:"toilet"},{class:63,label:"tv"},{class:64,label:"laptop"},{class:65,label:"mouse"},{class:66,label:"remote"},{class:67,label:"keyboard"},{class:68,label:"cell phone"},{class:69,label:"microwave"},{class:70,label:"oven"},{class:71,label:"toaster"},{class:72,label:"sink"},{class:73,label:"refrigerator"},{class:74,label:"book"},{class:75,label:"clock"},{class:76,label:"vase"},{class:77,label:"scissors"},{class:78,label:"teddy bear"},{class:79,label:"hair drier"},{class:80,label:"toothbrush"}];var W0,_0=0,t5=[],xA=0,o5=Number.MAX_SAFE_INTEGER;async function cA(e){if(R.initial&&(W0=null),W0)e.debug&&g("cached model:",W0.modelUrl);else{W0=await N(e.object.modelPath);let t=Object.values(W0.modelSignature.inputs);_0=Array.isArray(t)?parseInt(t[0].tensorShape.dim[2].size):0}return W0}async function Dr(e,t,o){if(!e)return[];let A={},n=[],s=await e.array();A.squeeze=r.squeeze(e);let a=r.split(A.squeeze,6,1);A.stack=r.stack([a[1],a[0],a[3],a[2]],1),A.boxes=r.squeeze(A.stack),A.scores=r.squeeze(a[4]),A.classes=r.squeeze(a[5]),r.dispose([e,...a]),A.nms=await r.image.nonMaxSuppressionAsync(A.boxes,A.scores,o.object.maxDetected,o.object.iouThreshold,o.object.minConfidence||0);let i=await A.nms.data(),x=0;for(let d of Array.from(i)){let l=Math.trunc(100*s[0][d][4])/100,y=s[0][d][5],c=xe[y].label,[f,h]=[s[0][d][0]/_0,s[0][d][1]/_0],m=[f,h,s[0][d][2]/_0-f,s[0][d][3]/_0-h],v=[Math.trunc(m[0]*t[0]),Math.trunc(m[1]*t[1]),Math.trunc(m[2]*t[0]),Math.trunc(m[3]*t[1])];n.push({id:x++,score:l,class:y,label:c,box:v,boxRaw:m})}return Object.keys(A).forEach(d=>r.dispose(A[d])),n}async function A5(e,t){let o=(t.object.skipTime||0)>M()-xA,A=o5<(t.object.skipFrames||0);return t.skipAllowed&&o&&A&&t5.length>0?(o5++,t5):(o5=0,new Promise(async n=>{let s=[e.shape[2]||0,e.shape[1]||0],a=r.image.resizeBilinear(e,[_0,_0]),i=t.object.enabled?W0==null?void 0:W0.execute(a,["tower_0/detections"]):null;xA=M(),r.dispose(a);let x=await Dr(i,s,t);t5=x,n(x)}))}var nt={};Ze(nt,{connected:()=>r5,kpt:()=>n5});var n5=["head","neck","rightShoulder","rightElbow","rightWrist","chest","leftShoulder","leftElbow","leftWrist","bodyCenter","rightHip","rightKnee","rightAnkle","leftHip","leftKnee","leftAnkle"],r5={leftLeg:["leftHip","leftKnee","leftAnkle"],rightLeg:["rightHip","rightKnee","rightAnkle"],torso:["leftShoulder","rightShoulder","rightHip","leftHip","leftShoulder"],leftArm:["leftShoulder","leftElbow","leftWrist"],rightArm:["rightShoulder","rightElbow","rightWrist"],head:[]};var n0,fA=0,a0={id:0,keypoints:[],box:[0,0,0,0],boxRaw:[0,0,0,0],score:0,annotations:{}},s5=Number.MAX_SAFE_INTEGER;async function mA(e){return R.initial&&(n0=null),n0?e.debug&&g("cached model:",n0.modelUrl):n0=await N(e.body.modelPath),n0}async function qr(e,t){let[o,A]=e.shape,n=r.reshape(e,[A*o]),s=r.max(n,0),a=(await s.data())[0];if(r.dispose([n,s]),a>t){let i=r.argMax(n,0),x=r.mod(i,o),d=(await x.data())[0],l=r.div(i,r.scalar(o,"int32")),y=(await l.data())[0];return r.dispose([x,l]),[d,y,a]}return[0,0,a]}async function a5(e,t){let 
o=(t.body.skipTime||0)>M()-fA,A=s5<(t.body.skipFrames||0);return t.skipAllowed&&o&&A&&Object.keys(a0.keypoints).length>0?(s5++,[a0]):(s5=0,new Promise(async n=>{var y;let s=r.tidy(()=>{if(!(n0==null?void 0:n0.inputs[0].shape))return null;let c=r.image.resizeBilinear(e,[n0.inputs[0].shape[2],n0.inputs[0].shape[1]],!1),f=r.mul(c,L.tf2);return r.sub(f,L.tf1)}),a;if(t.body.enabled&&(a=n0==null?void 0:n0.execute(s)),fA=M(),r.dispose(s),a){a0.keypoints.length=0;let c=a.squeeze();r.dispose(a);let f=c.unstack(2);r.dispose(c);for(let h=0;h(((y=t.body)==null?void 0:y.minConfidence)||0)&&a0.keypoints.push({score:Math.round(100*b)/100,part:n5[h],positionRaw:[m/n0.inputs[0].shape[2],v/n0.inputs[0].shape[1]],position:[Math.round(e.shape[2]*m/n0.inputs[0].shape[2]),Math.round(e.shape[1]*v/n0.inputs[0].shape[1])]})}f.forEach(h=>r.dispose(h))}a0.score=a0.keypoints.reduce((c,f)=>f.score>c?f.score:c,0);let i=a0.keypoints.map(c=>c.position[0]),x=a0.keypoints.map(c=>c.position[1]);a0.box=[Math.min(...i),Math.min(...x),Math.max(...i)-Math.min(...i),Math.max(...x)-Math.min(...x)];let d=a0.keypoints.map(c=>c.positionRaw[0]),l=a0.keypoints.map(c=>c.positionRaw[1]);a0.boxRaw=[Math.min(...d),Math.min(...l),Math.max(...d)-Math.min(...d),Math.max(...l)-Math.min(...l)];for(let[c,f]of Object.entries(r5)){let h=[];for(let m=0;mP.part===f[m]),b=a0.keypoints.find(P=>P.part===f[m+1]);v&&b&&v.score>(t.body.minConfidence||0)&&b.score>(t.body.minConfidence||0)&&h.push([v.position,b.position])}a0.annotations[c]=h}n([a0])}))}var Ur=["angry","disgust","fear","happy","sad","surprise","neutral"],p0,rt=[],uA=0,hA=0,i5=Number.MAX_SAFE_INTEGER;async function bA(e){var t;return R.initial&&(p0=null),p0?e.debug&&g("cached model:",p0.modelUrl):p0=await N((t=e.face.emotion)==null?void 0:t.modelPath),p0}async function l5(e,t,o,A){var a,i;if(!p0)return[];let n=i5<(((a=t.face.emotion)==null?void 0:a.skipFrames)||0),s=(((i=t.face.emotion)==null?void 0:i.skipTime)||0)>M()-hA;return t.skipAllowed&&s&&n&&uA===A&&rt[o]&&rt[o].length>0?(i5++,rt[o]):(i5=0,new Promise(async x=>{var l,y;let d=[];if((l=t.face.emotion)==null?void 0:l.enabled){let c={},f=(p0==null?void 0:p0.inputs[0].shape)?p0.inputs[0].shape[2]:0;c.resize=r.image.resizeBilinear(e,[f,f],!1),c.channels=r.mul(c.resize,L.rgb),c.grayscale=r.sum(c.channels,3,!0),c.grayscaleSub=r.sub(c.grayscale,L.tf05),c.grayscaleMul=r.mul(c.grayscaleSub,L.tf2),c.emotion=p0==null?void 0:p0.execute(c.grayscaleMul),hA=M();let h=await c.emotion.data();for(let m=0;m(((y=t.face.emotion)==null?void 0:y.minConfidence)||0)&&d.push({score:Math.min(.99,Math.trunc(100*h[m])/100),emotion:Ur[m]});d.sort((m,v)=>v.score-m.score),Object.keys(c).forEach(m=>r.dispose(c[m]))}rt[o]=d,uA=A,x(d)}))}var x0,y5=[],MA=0,PA=0,vA=Number.MAX_SAFE_INTEGER;async function TA(e){return R.initial&&(x0=null),x0?e.debug&&g("cached model:",x0.modelUrl):x0=await N(e.face.mobilefacenet.modelPath),x0}async function x5(e,t,o,A){var a,i;if(!x0)return[];let n=vA<(((a=t.face.embedding)==null?void 0:a.skipFrames)||0),s=(((i=t.face.embedding)==null?void 0:i.skipTime)||0)>M()-PA;return t.skipAllowed&&s&&n&&MA===A&&y5[o]?(vA++,y5[o]):new Promise(async x=>{var l;let d=[];if(((l=t.face.embedding)==null?void 0:l.enabled)&&(x0==null?void 0:x0.inputs[0].shape)){let y={};y.crop=r.image.resizeBilinear(e,[x0.inputs[0].shape[2],x0.inputs[0].shape[1]],!1),y.data=x0==null?void 0:x0.execute(y.crop);let c=await y.data.data();d=Array.from(c)}y5[o]=d,MA=A,PA=M(),x(d)})}var 
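// The pose decode used above (`qr`) reads one keypoint off a 2D heatmap by
// taking the argmax of the flattened map and converting the flat index back to
// grid coordinates. Equivalent plain-JS sketch for a row-major Float32Array
// (names invented; the bundle does this with tf.argMax/tf.mod/tf.div):
function exampleDecodeHeatmap(data, width, height) {
  let best = 0;
  for (let i = 1; i < width * height; i++) if (data[i] > data[best]) best = i;
  return { x: best % width, y: Math.floor(best / width), score: data[best] };
}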
C0,V0=0,Jr=2.3,c5=b0.leftEyeLower0,d5=b0.rightEyeLower0,ce={leftBounds:[c5[0],c5[c5.length-1]],rightBounds:[d5[0],d5[d5.length-1]]},de={upperCenter:3,lowerCenter:4,index:71,numCoordinates:76};async function zA(e){var t;return R.initial&&(C0=null),C0?e.debug&&g("cached model:",C0.modelUrl):C0=await N((t=e.face.iris)==null?void 0:t.modelPath),V0=C0.inputs[0].shape?C0.inputs[0].shape[2]:0,V0===-1&&(V0=64),C0}function st(e,t,o,A){for(let n=0;n{let t=e[ce.leftBounds[0]][2],o=e[ce.rightBounds[0]][2];return t-o},wA=(e,t,o,A,n,s=!1)=>{let a=Qe(Ke(Do([e[o],e[A]]),Jr)),i=le(a),x=r.image.cropAndResize(t,[[a.startPoint[1]/n,a.startPoint[0]/n,a.endPoint[1]/n,a.endPoint[0]/n]],[0],[V0,V0]);if(s&&R.kernels.includes("flipleftright")){let d=r.image.flipLeftRight(x);r.dispose(x),x=d}return{box:a,boxSize:i,crop:x}},kA=(e,t,o,A=!1)=>{let n=[];for(let s=0;s{let A=e[b0[`${o}EyeUpper0`][de.upperCenter]][2],n=e[b0[`${o}EyeLower0`][de.lowerCenter]][2],s=(A+n)/2;return t.map((a,i)=>{let x=s;return i===2?x=A:i===4&&(x=n),[a[0],a[1],x]})};async function jA(e,t,o,A){if(!C0)return o.debug&&g("face mesh iris detection requested, but model is not loaded"),e;let{box:n,boxSize:s,crop:a}=wA(e,t,ce.leftBounds[0],ce.leftBounds[1],A,!0),{box:i,boxSize:x,crop:d}=wA(e,t,ce.rightBounds[0],ce.rightBounds[1],A,!0),l=r.concat([a,d]);r.dispose(a),r.dispose(d);let y=C0.execute(l);r.dispose(l);let c=await y.data();r.dispose(y);let f=c.slice(0,de.numCoordinates*3),{rawCoords:h,iris:m}=kA(f,n,s,!0),v=c.slice(de.numCoordinates*3),{rawCoords:b,iris:P}=kA(v,i,x),p=Yr(e);Math.abs(p)<30?(st(e,h,"left",null),st(e,b,"right",null)):p<1?st(e,h,"left",["EyeUpper0","EyeLower0"]):st(e,b,"right",["EyeUpper0","EyeLower0"]);let u=EA(e,m,"left"),E=EA(e,P,"right");return e.concat(u).concat(E)}var R0={boxes:[],skipped:Number.MAX_SAFE_INTEGER,timestamp:0},I0=null,fe=0;async function WA(e,t){var i,x,d,l,y,c,f,h,m;let o=(((i=t.face.detector)==null?void 0:i.skipTime)||0)>M()-R0.timestamp,A=R0.skipped<(((x=t.face.detector)==null?void 0:x.skipFrames)||0);!t.skipAllowed||!o||!A||R0.boxes.length===0?(R0.boxes=await eA(e,t),R0.timestamp=M(),R0.skipped=0):R0.skipped++;let n=[],s=[],a=0;for(let v=0;v[H[0]/(e.shape[2]||0),H[1]/(e.shape[1]||0),(H[2]||0)/fe]);for(let H of Object.keys(b0))u.annotations[H]=b0[H].map(t0=>u.mesh[t0]);u.score=u.faceScore;let B={...Ko(u.mesh,b),confidence:b.confidence,landmarks:b.landmarks};u.box=Jt(B,e),u.boxRaw=Yt(B,e),s.push(B)}}else{u.box=Jt(b,e),u.boxRaw=Yt(b,e),u.score=u.boxScore,u.mesh=b.landmarks.map(E=>[(b.startPoint[0]+b.endPoint[0])/2+(b.endPoint[0]+b.startPoint[0])*E[0]/_e(),(b.startPoint[1]+b.endPoint[1])/2+(b.endPoint[1]+b.startPoint[1])*E[1]/_e()]),u.meshRaw=u.mesh.map(E=>[E[0]/(e.shape[2]||0),E[1]/(e.shape[1]||0),(E[2]||0)/fe]);for(let E of Object.keys(We))u.annotations[E]=[u.mesh[We[E]]]}u.score>(((m=t.face.detector)==null?void 0:m.minConfidence)||1)?n.push(u):r.dispose(u.tensor)}return R0.boxes=s,n}async function CA(e){var t;return R.initial&&(I0=null),I0?e.debug&&g("cached model:",I0.modelUrl):I0=await N((t=e.face.mesh)==null?void 0:t.modelPath),fe=I0.inputs[0].shape?I0.inputs[0].shape[2]:0,I0}var IA=K0,OA=Ce;var c0,at=[],NA=0,LA=0,m5=Number.MAX_SAFE_INTEGER;async function BA(e){var t;return R.initial&&(c0=null),c0?e.debug&&g("cached model:",c0.modelUrl):c0=await N((t=e.face.description)==null?void 0:t.modelPath),c0}function p5(e){let t=e.image||e.tensor||e;if(!(c0==null?void 0:c0.inputs[0].shape))return t;let o=r.image.resizeBilinear(t,[c0.inputs[0].shape[2],c0.inputs[0].shape[1]],!1),A=r.mul(o,L.tf255);return 
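// Nearly every module above shares the same temporal caching gate: reuse the
// previous result while elapsed time stays under skipTime and the call count
// stays under skipFrames, otherwise re-run the model and reset the counters.
// A hedged sketch of the pattern (names invented; the bundle also keys the
// cache per detected face or body index):
let exampleCached = null;
let exampleLastRun = 0;
let exampleSkipped = 0;
async function exampleCachedInference(input, config, runModel) {
  const timeOk = (config.skipTime || 0) > Date.now() - exampleLastRun;
  const framesOk = exampleSkipped < (config.skipFrames || 0);
  if (config.skipAllowed && timeOk && framesOk && exampleCached) {
    exampleSkipped++;
    return exampleCached;
  }
  exampleSkipped = 0;
  exampleLastRun = Date.now();
  exampleCached = await runModel(input);
  return exampleCached;
}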
r.dispose(o),A}async function u5(e,t,o,A){var a,i,x,d;if(!c0)return{age:0,gender:"unknown",genderScore:0,descriptor:[]};let n=m5<(((a=t.face.description)==null?void 0:a.skipFrames)||0),s=(((i=t.face.description)==null?void 0:i.skipTime)||0)>M()-NA;return t.skipAllowed&&n&&s&&LA===A&&((x=at[o])==null?void 0:x.age)&&((d=at[o])==null?void 0:d.age)>0?(m5++,at[o]):(m5=0,new Promise(async l=>{var c,f;let y={age:0,gender:"unknown",genderScore:0,descriptor:[]};if((c=t.face.description)==null?void 0:c.enabled){let h=p5(e),m=c0==null?void 0:c0.execute(h);NA=M(),r.dispose(h);let b=await(await m.find(O=>O.shape[1]===1)).data(),P=Math.trunc(200*Math.abs(b[0]-.5))/100;P>(((f=t.face.description)==null?void 0:f.minConfidence)||0)&&(y.gender=b[0]<=.5?"female":"male",y.genderScore=Math.min(.99,P));let p=r.argMax(m.find(O=>O.shape[1]===100),1),u=(await p.data())[0];r.dispose(p);let W=await m.find(O=>O.shape[1]===100).data();y.age=Math.round(W[u-1]>W[u+1]?10*u-100*W[u-1]:10*u+100*W[u+1])/10;let C=m.find(O=>O.shape[1]===1024),S=C?await C.data():[];y.descriptor=Array.from(S),m.forEach(O=>r.dispose(O))}at[o]=y,LA=A,l(y)}))}function it(e){return[Math.abs(e.endPoint[0]-e.startPoint[0]),Math.abs(e.endPoint[1]-e.startPoint[1])]}function Ne(e){return[e.startPoint[0]+(e.endPoint[0]-e.startPoint[0])/2,e.startPoint[1]+(e.endPoint[1]-e.startPoint[1])/2]}function FA(e,t,o){let A=t.shape[1],n=t.shape[2],s=[[e.startPoint[1]/A,e.startPoint[0]/n,e.endPoint[1]/A,e.endPoint[0]/n]];return r.image.cropAndResize(t,s,[0],o)}function VA(e,t){let o=[e.startPoint[0]*t[0],e.startPoint[1]*t[1]],A=[e.endPoint[0]*t[0],e.endPoint[1]*t[1]],n=e.palmLandmarks.map(s=>[s[0]*t[0],s[1]*t[1]]);return{startPoint:o,endPoint:A,palmLandmarks:n,confidence:e.confidence}}function lt(e,t=1.5){let o=Ne(e),A=it(e),n=[t*A[0]/2,t*A[1]/2],s=[o[0]-n[0],o[1]-n[1]],a=[o[0]+n[0],o[1]+n[1]];return{startPoint:s,endPoint:a,palmLandmarks:e.palmLandmarks}}function yt(e){let t=Ne(e),o=it(e),n=Math.max(...o)/2,s=[t[0]-n,t[1]-n],a=[t[0]+n,t[1]+n];return{startPoint:s,endPoint:a,palmLandmarks:e.palmLandmarks}}function Kr(e){return e-2*Math.PI*Math.floor((e+Math.PI)/(2*Math.PI))}function ZA(e,t){let o=Math.PI/2-Math.atan2(-(t[1]-e[1]),t[0]-e[0]);return Kr(o)}var GA=(e,t)=>[[1,0,e],[0,1,t],[0,0,1]];function Z0(e,t){let o=0;for(let A=0;A[o.x,o.y]),this.anchorsTensor=r.tensor2d(this.anchors),this.inputSize=this.model&&this.model.inputs&&this.model.inputs[0].shape?this.model.inputs[0].shape[2]:0,this.inputSizeTensor=r.tensor1d([this.inputSize,this.inputSize]),this.doubleInputSizeTensor=r.tensor1d([this.inputSize*2,this.inputSize*2])}normalizeBoxes(t){let o={};o.boxOffsets=r.slice(t,[0,0],[-1,2]),o.boxSizes=r.slice(t,[0,2],[-1,2]),o.div=r.div(o.boxOffsets,this.inputSizeTensor),o.boxCenterPoints=r.add(o.div,this.anchorsTensor),o.halfBoxSizes=r.div(o.boxSizes,this.doubleInputSizeTensor),o.sub=r.sub(o.boxCenterPoints,o.halfBoxSizes),o.startPoints=r.mul(o.sub,this.inputSizeTensor),o.add=r.add(o.boxCenterPoints,o.halfBoxSizes),o.endPoints=r.mul(o.add,this.inputSizeTensor);let A=r.concat2d([o.startPoints,o.endPoints],1);return Object.keys(o).forEach(n=>r.dispose(o[n])),A}normalizeLandmarks(t,o){let A={};A.reshape=r.reshape(t,[-1,7,2]),A.div=r.div(A.reshape,this.inputSizeTensor),A.landmarks=r.add(A.div,this.anchors[o]);let n=r.mul(A.landmarks,this.inputSizeTensor);return Object.keys(A).forEach(s=>r.dispose(A[s])),n}async predict(t,o){let 
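// The face description decode above reads age from 100 softmax bins: take the
// argmax bin, then nudge the estimate toward whichever neighboring bin carries
// more probability mass. Plain-array sketch of that decode (name invented;
// gender is decoded separately from a single sigmoid-like output around 0.5):
function exampleDecodeAge(probs) {
  let u = 0;
  for (let i = 1; i < probs.length; i++) if (probs[i] > probs[u]) u = i;
  const lo = probs[u - 1] || 0; // guard the edges of the bin array
  const hi = probs[u + 1] || 0;
  const nudged = lo > hi ? 10 * u - 100 * lo : 10 * u + 100 * hi;
  return Math.round(nudged) / 10; // age in years, one decimal
}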
A={};A.resize=r.image.resizeBilinear(t,[this.inputSize,this.inputSize]),A.div=r.div(A.resize,L.tf127),A.image=r.sub(A.div,L.tf1),A.batched=this.model.execute(A.image),A.predictions=r.squeeze(A.batched),A.slice=r.slice(A.predictions,[0,0],[-1,1]),A.sigmoid=r.sigmoid(A.slice),A.scores=r.squeeze(A.sigmoid);let n=await A.scores.data();A.boxes=r.slice(A.predictions,[0,1],[-1,4]),A.norm=this.normalizeBoxes(A.boxes),A.nms=await r.image.nonMaxSuppressionAsync(A.norm,A.scores,3*o.hand.maxDetected,o.hand.iouThreshold,o.hand.minConfidence);let s=await A.nms.array(),a=[];for(let i of s){let x={};x.box=r.slice(A.norm,[i,0],[1,-1]),x.slice=r.slice(A.predictions,[i,5],[1,14]),x.norm=this.normalizeLandmarks(x.slice,i),x.palmLandmarks=r.reshape(x.norm,[-1,2]);let d=await x.box.data(),l=d.slice(0,2),y=d.slice(2,4),c=await x.palmLandmarks.array(),f={startPoint:l,endPoint:y,palmLandmarks:c,confidence:n[i]},h=VA(f,[t.shape[2]/this.inputSize,t.shape[1]/this.inputSize]);a.push(h),Object.keys(x).forEach(m=>r.dispose(x[m]))}return Object.keys(A).forEach(i=>r.dispose(A[i])),a}};var e2=5,UA=1.65,JA=[0,5,9,13,17,1,2],t2=0,o2=2,YA=0,P5=class{constructor(t,o){w(this,"handDetector");w(this,"handPoseModel");w(this,"inputSize");w(this,"storedBoxes");w(this,"skipped");w(this,"detectedHands");this.handDetector=t,this.handPoseModel=o,this.inputSize=this.handPoseModel&&this.handPoseModel.inputs[0].shape?this.handPoseModel.inputs[0].shape[2]:0,this.storedBoxes=[],this.skipped=Number.MAX_SAFE_INTEGER,this.detectedHands=0}calculateLandmarksBoundingBox(t){let o=t.map(a=>a[0]),A=t.map(a=>a[1]),n=[Math.min(...o),Math.min(...A)],s=[Math.max(...o),Math.max(...A)];return{startPoint:n,endPoint:s}}getBoxForPalmLandmarks(t,o){let A=t.map(s=>g5([...s,1],o)),n=this.calculateLandmarksBoundingBox(A);return lt(yt(n),e2)}getBoxForHandLandmarks(t){let o=this.calculateLandmarksBoundingBox(t),A=lt(yt(o),UA);A.palmLandmarks=[];for(let n=0;n[a[0]*(f[0]-this.inputSize/2),a[1]*(f[1]-this.inputSize/2),a[2]*f[2]]),x=b5(A,[0,0]),d=i.map(f=>[...g5(f,x),f[2]]),l=XA(n),y=[...Ne(o),1],c=[Z0(y,l[0]),Z0(y,l[1])];return d.map(f=>[Math.trunc(f[0]+c[0]),Math.trunc(f[1]+c[1]),Math.trunc(f[2])])}async estimateHands(t,o){let A=!1,n,s=(o.hand.skipTime||0)>M()-YA,a=this.skipped<(o.hand.skipFrames||0);o.skipAllowed&&s&&a&&(n=await this.handDetector.predict(t,o),this.skipped=0),o.skipAllowed&&this.skipped++,n&&n.length>0&&(n.length!==this.detectedHands&&this.detectedHands!==o.hand.maxDetected||!o.hand.landmarks)&&(this.detectedHands=0,this.storedBoxes=[...n],this.storedBoxes.length>0&&(A=!0));let i=[];for(let x=0;x=o.hand.minConfidence/4){let E=r.reshape(p,[-1,3]),W=await E.array();r.dispose(p),r.dispose(E);let C=this.transformRawCoords(W,m,l,h),S=this.getBoxForHandLandmarks(C);this.storedBoxes[x]={...S,confidence:u};let O={landmarks:C,confidence:u,boxConfidence:d.confidence,fingerConfidence:u,box:{topLeft:S.startPoint,bottomRight:S.endPoint}};i.push(O)}else this.storedBoxes[x]=null;r.dispose(p)}else{let l=lt(yt(d),UA),y={confidence:d.confidence,boxConfidence:d.confidence,fingerConfidence:0,box:{topLeft:l.startPoint,bottomRight:l.endPoint},landmarks:[]};i.push(y)}}return this.storedBoxes=this.storedBoxes.filter(x=>x!==null),this.detectedHands=i.length,i.length>o.hand.maxDetected&&(i.length=o.hand.maxDetected),i}};var 
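// The hand pipeline above avoids re-running the palm detector every frame:
// once landmarks exist, the next frame's search box is rebuilt from the
// previous frame's keypoints (bounding box, squarified and padded). A hedged
// standalone sketch of that recomputation (names invented; the 1.65 padding
// factor is taken from the constants above):
function exampleBoxFromLandmarks(landmarks, scale = 1.65) {
  const xs = landmarks.map((p) => p[0]);
  const ys = landmarks.map((p) => p[1]);
  const cx = (Math.min(...xs) + Math.max(...xs)) / 2;
  const cy = (Math.min(...ys) + Math.max(...ys)) / 2;
  const half = (scale * Math.max(Math.max(...xs) - Math.min(...xs), Math.max(...ys) - Math.min(...ys))) / 2;
  return { startPoint: [cx - half, cy - half], endPoint: [cx + half, cy + half] };
}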
i0={thumb:0,index:1,middle:2,ring:3,pinky:4,all:[0,1,2,3,4],nameMapping:{0:"thumb",1:"index",2:"middle",3:"ring",4:"pinky"},pointsMapping:{0:[[0,1],[1,2],[2,3],[3,4]],1:[[0,5],[5,6],[6,7],[7,8]],2:[[0,9],[9,10],[10,11],[11,12]],3:[[0,13],[13,14],[14,15],[15,16]],4:[[0,17],[17,18],[18,19],[19,20]]},getName:e=>i0.nameMapping[e],getPoints:e=>i0.pointsMapping[e]},X0={none:0,half:1,full:2,nameMapping:{0:"none",1:"half",2:"full"},getName:e=>X0.nameMapping[e]},U={verticalUp:0,verticalDown:1,horizontalLeft:2,horizontalRight:3,diagonalUpRight:4,diagonalUpLeft:5,diagonalDownRight:6,diagonalDownLeft:7,nameMapping:{0:"verticalUp",1:"verticalDown",2:"horizontalLeft",3:"horizontalRight",4:"diagonalUpRight",5:"diagonalUpLeft",6:"diagonalDownRight",7:"diagonalDownLeft"},getName:e=>U.nameMapping[e]},$0=class{constructor(t){w(this,"name");w(this,"curls");w(this,"directions");w(this,"weights");w(this,"weightsRelative");this.name=t,this.curls={},this.directions={},this.weights=[1,1,1,1,1],this.weightsRelative=[1,1,1,1,1]}curl(t,o,A){typeof this.curls[t]=="undefined"&&(this.curls[t]=[]),this.curls[t].push([o,A])}direction(t,o,A){this.directions[t]||(this.directions[t]=[]),this.directions[t].push([o,A])}weight(t,o){this.weights[t]=o;let A=this.weights.reduce((n,s)=>n+s,0);this.weightsRelative=this.weights.map(n=>n*5/A)}matchAgainst(t,o){let A=0;for(let n in t){let s=t[n],a=this.curls[n];if(typeof a=="undefined"){A+=this.weightsRelative[n];continue}for(let[i,x]of a)if(s===i){A+=x*this.weightsRelative[n];break}}for(let n in o){let s=o[n],a=this.directions[n];if(typeof a=="undefined"){A+=this.weightsRelative[n];continue}for(let[i,x]of a)if(s===i){A+=x*this.weightsRelative[n];break}}return A/10}};var{thumb:g0,index:O0,middle:N0,ring:ee,pinky:te}=i0,{none:M0,half:n2,full:P0}=X0,{verticalUp:me,verticalDown:fa,horizontalLeft:v5,horizontalRight:r2,diagonalUpRight:s2,diagonalUpLeft:pe,diagonalDownRight:ma,diagonalDownLeft:pa}=U,D0=new $0("thumbs up");D0.curl(g0,M0,1);D0.direction(g0,me,1);D0.direction(g0,pe,.25);D0.direction(g0,s2,.25);for(let e of[i0.index,i0.middle,i0.ring,i0.pinky])D0.curl(e,P0,1),D0.direction(e,v5,1),D0.direction(e,r2,1);var K=new $0("victory");K.curl(g0,n2,.5);K.curl(g0,M0,.5);K.direction(g0,me,1);K.direction(g0,pe,1);K.curl(O0,M0,1);K.direction(O0,me,.75);K.direction(O0,pe,1);K.curl(N0,M0,1);K.direction(N0,me,1);K.direction(N0,pe,.75);K.curl(ee,P0,1);K.direction(ee,me,.2);K.direction(ee,pe,1);K.direction(ee,v5,.2);K.curl(te,P0,1);K.direction(te,me,.2);K.direction(te,pe,1);K.direction(te,v5,.2);K.weight(O0,2);K.weight(N0,2);var q0=new $0("point");q0.curl(g0,P0,1);q0.curl(O0,M0,.5);q0.curl(N0,P0,.5);q0.curl(ee,P0,.5);q0.curl(te,P0,.5);q0.weight(O0,2);q0.weight(N0,2);var U0=new $0("middle finger");U0.curl(g0,M0,1);U0.curl(O0,P0,.5);U0.curl(N0,P0,.5);U0.curl(ee,P0,.5);U0.curl(te,P0,.5);U0.weight(O0,2);U0.weight(N0,2);var ue=new $0("open palm");ue.curl(g0,M0,.75);ue.curl(O0,M0,.75);ue.curl(N0,M0,.75);ue.curl(ee,M0,.75);ue.curl(te,M0,.75);var KA=[D0,K,q0,U0,ue];var a2=.7,oe={HALF_CURL_START_LIMIT:60,NO_CURL_START_LIMIT:130,DISTANCE_VOTE_POWER:1.1,SINGLE_ANGLE_VOTE_POWER:.9,TOTAL_ANGLE_VOTE_POWER:1.6};function QA(e,t,o,A){let n=(t-A)/(e-o),s=Math.atan(n)*180/Math.PI;return s<=0?s=-s:s>0&&(s=180-s),s}function $A(e,t){if(!e||!t)return[0,0];let o=QA(e[0],e[1],t[0],t[1]);if(e.length===2)return o;let A=QA(e[1],e[2],t[1],t[2]);return[o,A]}function _A(e,t=1){let o=0,A=0,n=0;return e>=75&&e<=105?o=1*t:e>=25&&e<=155?A=1*t:n=1*t,[o,A,n]}function i2(e,t,o){let 
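// A gesture description above is a template of expected per-finger curls and
// directions with confidences; matching sums the confidence of every satisfied
// expectation, weighted per finger. A simplified sketch of the same idea,
// curls only (names invented; the real matcher also scores directions and
// normalizes by relative finger weights):
const exampleThumbsUpTemplate = {
  curls: { thumb: ['none'], index: ['full'], middle: ['full'], ring: ['full'], pinky: ['full'] },
};
function exampleMatchGesture(template, observedCurls) {
  const fingers = Object.keys(template.curls);
  let hits = 0;
  for (const finger of fingers) if (template.curls[finger].includes(observedCurls[finger])) hits++;
  return hits / fingers.length; // 1.0 means every finger matched
}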
A=e[0]-t[0],n=e[0]-o[0],s=t[0]-o[0],a=e[1]-t[1],i=e[1]-o[1],x=t[1]-o[1],d=e[2]-t[2],l=e[2]-o[2],y=t[2]-o[2],c=Math.sqrt(A*A+a*a+d*d),f=Math.sqrt(n*n+i*i+l*l),h=Math.sqrt(s*s+x*x+y*y),m=(h*h+c*c-f*f)/(2*h*c);m>1?m=1:m<-1&&(m=-1);let v=Math.acos(m);v=57.2958*v%180;let b;return v>oe.NO_CURL_START_LIMIT?b=X0.none:v>oe.HALF_CURL_START_LIMIT?b=X0.half:b=X0.full,b}function en(e,t,o,A){let n;return A===Math.abs(e)?e>0?n=U.horizontalLeft:n=U.horizontalRight:A===Math.abs(t)?t>0?n=U.horizontalLeft:n=U.horizontalRight:o>0?n=U.horizontalLeft:n=U.horizontalRight,n}function tn(e,t,o,A){let n;return A===Math.abs(e)?e<0?n=U.verticalDown:n=U.verticalUp:A===Math.abs(t)?t<0?n=U.verticalDown:n=U.verticalUp:o<0?n=U.verticalDown:n=U.verticalUp,n}function l2(e,t,o,A,n,s,a,i){let x,d=tn(e,t,o,A),l=en(n,s,a,i);return d===U.verticalUp?l===U.horizontalLeft?x=U.diagonalUpLeft:x=U.diagonalUpRight:l===U.horizontalLeft?x=U.diagonalDownLeft:x=U.diagonalDownRight,x}function y2(e,t,o,A){let n=e[0]-t[0],s=e[0]-o[0],a=t[0]-o[0],i=e[1]-t[1],x=e[1]-o[1],d=t[1]-o[1],l=Math.max(Math.abs(n),Math.abs(s),Math.abs(a)),y=Math.max(Math.abs(i),Math.abs(x),Math.abs(d)),c=0,f=0,h=0,m=y/(l+1e-5);m>1.5?c+=oe.DISTANCE_VOTE_POWER:m>.66?f+=oe.DISTANCE_VOTE_POWER:h+=oe.DISTANCE_VOTE_POWER;let v=Math.sqrt(n*n+i*i),b=Math.sqrt(s*s+x*x),P=Math.sqrt(a*a+d*d),p=Math.max(v,b,P),u=e[0],E=e[1],W=o[0],C=o[1];p===v?(W=o[0],C=o[1]):p===P&&(u=t[0],E=t[1]);let V=$A([u,E],[W,C]),B=_A(V,oe.TOTAL_ANGLE_VOTE_POWER);c+=B[0],f+=B[1],h+=B[2];for(let t0 of A){let z=_A(t0,oe.SINGLE_ANGLE_VOTE_POWER);c+=z[0],f+=z[1],h+=z[2]}let H;return c===Math.max(c,f,h)?H=tn(x,i,d,y):h===Math.max(f,h)?H=en(s,n,a,l):H=l2(x,i,d,y,s,n,a,l),H}function on(e){let t=[],o=[],A=[],n=[];if(!e)return{curls:A,directions:n};for(let s of i0.all){let a=i0.getPoints(s),i=[],x=[];for(let d of a){let l=e[d[0]],y=e[d[1]],c=$A(l,y),f=c[0],h=c[1];i.push(f),x.push(h)}t.push(i),o.push(x)}for(let s of i0.all){let a=s===i0.thumb?1:0,i=i0.getPoints(s),x=e[i[a][0]],d=e[i[a+1][1]],l=e[i[3][1]],y=i2(x,d,l),c=y2(x,d,l,t[s].slice(a));A[s]=y,n[s]=c}return{curls:A,directions:n}}function xt(e){if(!e||e.length===0)return null;let t=on(e),o={};for(let A of i0.all)o[i0.getName(A)]={curl:X0.getName(t.curls[A]),direction:U.getName(t.directions[A])};return o}function An(e){let t=[];if(!e||e.length===0)return t;let o=on(e);for(let A of KA){let n=A.matchAgainst(o.curls,o.directions);n>=a2&&t.push({name:A.name,confidence:n})}return t}var nn={thumb:[1,2,3,4],index:[5,6,7,8],middle:[9,10,11,12],ring:[13,14,15,16],pinky:[17,18,19,20],palm:[0]},he,be,rn;async function R5(e,t){let o=await rn.estimateHands(e,t);if(!o)return[];let A=[];for(let n=0;no[n].landmarks[y]);let a=o[n].landmarks,i=[Number.MAX_SAFE_INTEGER,Number.MAX_SAFE_INTEGER,0,0],x=[0,0,0,0];if(a&&a.length>0){for(let l of a)l[0]i[2]&&(i[2]=l[0]),l[1]>i[3]&&(i[3]=l[1]);i[2]-=i[0],i[3]-=i[1],x=[i[0]/(e.shape[2]||0),i[1]/(e.shape[1]||0),i[2]/(e.shape[2]||0),i[3]/(e.shape[1]||0)]}else i=o[n].box?[Math.trunc(Math.max(0,o[n].box.topLeft[0])),Math.trunc(Math.max(0,o[n].box.topLeft[1])),Math.trunc(Math.min(e.shape[2]||0,o[n].box.bottomRight[0])-Math.max(0,o[n].box.topLeft[0])),Math.trunc(Math.min(e.shape[1]||0,o[n].box.bottomRight[1])-Math.max(0,o[n].box.topLeft[1]))]:[0,0,0,0],x=[o[n].box.topLeft[0]/(e.shape[2]||0),o[n].box.topLeft[1]/(e.shape[1]||0),(o[n].box.bottomRight[0]-o[n].box.topLeft[0])/(e.shape[2]||0),(o[n].box.bottomRight[1]-o[n].box.topLeft[1])/(e.shape[1]||0)];let 
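// assemble the per-hand result: rounded detection/box/finger scores, pixel box plus normalized boxRaw,
// raw keypoints, and named finger landmarks derived via xt()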
d=xt(a);A.push({id:n,score:Math.round(100*o[n].confidence)/100,boxScore:Math.round(100*o[n].boxConfidence)/100,fingerScore:Math.round(100*o[n].fingerConfidence)/100,label:"hand",box:i,boxRaw:x,keypoints:a,annotations:s,landmarks:d})}return A}async function w5(e){var o,A;R.initial&&(he=null,be=null),!he||!be?[he,be]=await Promise.all([e.hand.enabled?N((o=e.hand.detector)==null?void 0:o.modelPath):null,e.hand.landmarks?N((A=e.hand.skeleton)==null?void 0:A.modelPath):null]):(e.debug&&g("cached model:",he.modelUrl),e.debug&&g("cached model:",be.modelUrl));let t=new M5(he);return rn=new P5(t,be),[he,be]}var $=[null,null],x2=["StatefulPartitionedCall/Postprocessor/Slice","StatefulPartitionedCall/Postprocessor/ExpandDims_1"],J0=[[0,0],[0,0]],c2=["hand","fist","pinch","point","face","tip","pinchtip"],an=4,ln=1.6,d2=512,f2=1.4,ct=Number.MAX_SAFE_INTEGER,k5=0,L0=[0,0],J={boxes:[],hands:[]},yn={thumb:[1,2,3,4],index:[5,6,7,8],middle:[9,10,11,12],ring:[13,14,15,16],pinky:[17,18,19,20],base:[0],palm:[0,17,13,9,5,1,0]};async function xn(e){var t;if(R.initial&&($[0]=null),$[0])e.debug&&g("cached model:",$[0].modelUrl);else{dt(["tensorlistreserve","enter","tensorlistfromtensor","merge","loopcond","switch","exit","tensorliststack","nextiteration","tensorlistsetitem","tensorlistgetitem","reciprocal","shape","split","where"],e),$[0]=await N((t=e.hand.detector)==null?void 0:t.modelPath);let o=Object.values($[0].modelSignature.inputs);J0[0][0]=Array.isArray(o)?parseInt(o[0].tensorShape.dim[1].size):0,J0[0][1]=Array.isArray(o)?parseInt(o[0].tensorShape.dim[2].size):0}return $[0]}async function cn(e){var t;if(R.initial&&($[1]=null),$[1])e.debug&&g("cached model:",$[1].modelUrl);else{$[1]=await N((t=e.hand.skeleton)==null?void 0:t.modelPath);let o=Object.values($[1].modelSignature.inputs);J0[1][0]=Array.isArray(o)?parseInt(o[0].tensorShape.dim[1].size):0,J0[1][1]=Array.isArray(o)?parseInt(o[0].tensorShape.dim[2].size):0}return $[1]}async function m2(e,t){let o=[];if(!e||!$[0])return o;let A={},n=(e.shape[2]||1)/(e.shape[1]||1),s=Math.min(Math.round((e.shape[1]||0)/8)*8,d2),a=Math.round(s*n/8)*8;A.resize=r.image.resizeBilinear(e,[s,a]),A.cast=r.cast(A.resize,"int32"),[A.rawScores,A.rawBoxes]=await $[0].executeAsync(A.cast,x2),A.boxes=r.squeeze(A.rawBoxes,[0,2]),A.scores=r.squeeze(A.rawScores,[0]);let i=r.unstack(A.scores,1);r.dispose(i[an]),i.splice(an,1),A.filtered=r.stack(i,1),r.dispose(i),A.max=r.max(A.filtered,1),A.argmax=r.argMax(A.filtered,1);let x=0;A.nms=await r.image.nonMaxSuppressionAsync(A.boxes,A.max,(t.hand.maxDetected||0)+1,t.hand.iouThreshold||0,t.hand.minConfidence||1);let d=await A.nms.data(),l=await A.max.data(),y=await A.argmax.data();for(let c of Array.from(d)){let f=r.slice(A.boxes,c,1),h=await f.data();r.dispose(f);let m=[h[1],h[0],h[3]-h[1],h[2]-h[0]],v=tt(m,f2),b=[Math.trunc(m[0]*L0[0]),Math.trunc(m[1]*L0[1]),Math.trunc(m[2]*L0[0]),Math.trunc(m[3]*L0[1])],P=l[c],p=c2[y[c]],u={id:x++,score:P,box:b,boxRaw:v,label:p};o.push(u)}return Object.keys(A).forEach(c=>r.dispose(A[c])),o.sort((c,f)=>f.score-c.score),o.length>(t.hand.maxDetected||1)&&(o.length=t.hand.maxDetected||1),o}async function E5(e,t,o){let A={id:t.id,score:Math.round(100*t.score)/100,boxScore:Math.round(100*t.score)/100,fingerScore:0,box:t.box,boxRaw:t.boxRaw,label:t.label,keypoints:[],landmarks:{},annotations:{}};if(e&&$[1]&&o.hand.landmarks&&t.score>(o.hand.minConfidence||0)){let 
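// run the hand-skeleton model on the cropped, 0..1-normalized hand region;
// the finger score is a sigmoid-style transform of the raw model output score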
n={},s=[t.boxRaw[1],t.boxRaw[0],t.boxRaw[3]+t.boxRaw[1],t.boxRaw[2]+t.boxRaw[0]];n.crop=r.image.cropAndResize(e,[s],[0],[J0[1][0],J0[1][1]],"bilinear"),n.div=r.div(n.crop,L.tf255),[n.score,n.keypoints]=$[1].execute(n.div,["Identity_1","Identity"]);let a=(await n.score.data())[0],i=(100-Math.trunc(100/(1+Math.exp(a))))/100;if(i>=(o.hand.minConfidence||0)){A.fingerScore=i,n.reshaped=r.reshape(n.keypoints,[-1,3]);let l=(await n.reshaped.array()).map(y=>[y[0]/J0[1][1],y[1]/J0[1][0],y[2]||0]).map(y=>[y[0]*t.boxRaw[2],y[1]*t.boxRaw[3],y[2]||0]);A.keypoints=l.map(y=>[L0[0]*(y[0]+t.boxRaw[0]),L0[1]*(y[1]+t.boxRaw[1]),y[2]||0]),A.landmarks=xt(A.keypoints);for(let y of Object.keys(yn))A.annotations[y]=yn[y].map(c=>A.landmarks&&A.keypoints[c]?A.keypoints[c]:null)}Object.keys(n).forEach(x=>r.dispose(n[x]))}return A}async function z5(e,t){var n,s;if(!$[0]||!$[1]||!((n=$[0])==null?void 0:n.inputs[0].shape)||!((s=$[1])==null?void 0:s.inputs[0].shape))return[];L0=[e.shape[2]||0,e.shape[1]||0],ct++;let o=(t.hand.skipTime||0)>M()-k5,A=ct<(t.hand.skipFrames||0);return t.skipAllowed&&o&&A?J.hands:new Promise(async a=>{let i=3*(t.hand.skipTime||0)>M()-k5,x=ct<3*(t.hand.skipFrames||0);t.skipAllowed&&J.hands.length===t.hand.maxDetected?J.hands=await Promise.all(J.boxes.map(l=>E5(e,l,t))):t.skipAllowed&&i&&x&&J.hands.length>0?J.hands=await Promise.all(J.boxes.map(l=>E5(e,l,t))):(J.boxes=await m2(e,t),k5=M(),J.hands=await Promise.all(J.boxes.map(l=>E5(e,l,t))),ct=0);let d=[...J.boxes];if(J.boxes.length=0,t.cacheSensitivity>0)for(let l=0;l.05&&y.box[3]/(e.shape[1]||1)>.05&&J.hands[l].fingerScore&&J.hands[l].fingerScore>(t.hand.minConfidence||0)){let c=tt(y.box,ln),f=tt(y.boxRaw,ln);J.boxes.push({...d[l],box:c,boxRaw:f})}}for(let l=0;lM()-mn,s=j5<(((i=t.face.liveness)==null?void 0:i.skipFrames)||0);return t.skipAllowed&&n&&s&&fn===A&&ft[o]?(j5++,ft[o]):(j5=0,new Promise(async x=>{let d=r.image.resizeBilinear(e,[(r0==null?void 0:r0.inputs[0].shape)?r0.inputs[0].shape[2]:0,(r0==null?void 0:r0.inputs[0].shape)?r0.inputs[0].shape[1]:0],!1),l=r0==null?void 0:r0.execute(d),y=(await l.data())[0];ft[o]=Math.round(100*y)/100,fn=A,mn=M(),r.dispose([d,l]),x(ft[o])}))}var Le={};Ze(Le,{connected:()=>pt,horizontal:()=>W5,kpt:()=>mt,relative:()=>I5,vertical:()=>C5});var mt=["nose","leftEye","rightEye","leftEar","rightEar","leftShoulder","rightShoulder","leftElbow","rightElbow","leftWrist","rightWrist","leftHip","rightHip","leftKnee","rightKnee","leftAnkle","rightAnkle"],W5=[["leftEye","rightEye"],["leftEar","rightEar"],["leftShoulder","rightShoulder"],["leftElbow","rightElbow"],["leftWrist","rightWrist"],["leftHip","rightHip"],["leftKnee","rightKnee"],["leftAnkle","rightAnkle"]],C5=[["leftKnee","leftShoulder"],["rightKnee","rightShoulder"],["leftAnkle","leftKnee"],["rightAnkle","rightKnee"]],I5=[[["leftHip","rightHip"],["leftShoulder","rightShoulder"]],[["leftElbow","rightElbow"],["leftShoulder","rightShoulder"]]],pt={leftLeg:["leftHip","leftKnee","leftAnkle"],rightLeg:["rightHip","rightKnee","rightAnkle"],torso:["leftShoulder","rightShoulder","rightHip","leftHip","leftShoulder"],leftArm:["leftShoulder","leftElbow","leftWrist"],rightArm:["rightShoulder","rightElbow","rightWrist"],head:[]};var hn=.005,d0={keypoints:[],padding:[[0,0],[0,0],[0,0],[0,0]]};function O5(e){for(let t of W5){let 
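// swap mislabeled left/right keypoints by comparing positions of horizontally paired parts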
o=e.keypoints.findIndex(n=>n.part===t[0]),A=e.keypoints.findIndex(n=>n.part===t[1]);if(e.keypoints[o]&&e.keypoints[A]&&e.keypoints[o].position[0]n&&n.part===t[0]),A=e.keypoints.findIndex(n=>n&&n.part===t[1]);e.keypoints[o]&&e.keypoints[A]&&e.keypoints[o].position[1]d&&d.part===t[0]),n=e.keypoints.findIndex(d=>d&&d.part===t[1]),s=e.keypoints.findIndex(d=>d&&d.part===o[0]),a=e.keypoints.findIndex(d=>d&&d.part===o[1]);if(!e.keypoints[s]||!e.keypoints[a])continue;let i=e.keypoints[A]?[Math.abs(e.keypoints[s].position[0]-e.keypoints[A].position[0]),Math.abs(e.keypoints[a].position[0]-e.keypoints[A].position[0])]:[0,0],x=e.keypoints[n]?[Math.abs(e.keypoints[a].position[0]-e.keypoints[n].position[0]),Math.abs(e.keypoints[s].position[0]-e.keypoints[n].position[0])]:[0,0];if(i[0]>i[1]||x[0]>x[1]){let d=e.keypoints[A];e.keypoints[A]=e.keypoints[n],e.keypoints[n]=d}}}function bn(e){for(let t=0;te.shape[1]?Math.trunc((e.shape[2]-e.shape[1])/2):0,e.shape[2]>e.shape[1]?Math.trunc((e.shape[2]-e.shape[1])/2):0],[e.shape[1]>e.shape[2]?Math.trunc((e.shape[1]-e.shape[2])/2):0,e.shape[1]>e.shape[2]?Math.trunc((e.shape[1]-e.shape[2])/2):0],[0,0]],o.pad=r.pad(e,d0.padding),o.resize=r.image.resizeBilinear(o.pad,[t,t]);let A=r.cast(o.resize,"int32");return Object.keys(o).forEach(n=>r.dispose(o[n])),A}function Mn(e,t){e.keypoints=e.keypoints.filter(A=>A&&A.position);for(let A of e.keypoints)A.position=[A.position[0]*(t[0]+d0.padding[2][0]+d0.padding[2][1])/t[0]-d0.padding[2][0],A.position[1]*(t[1]+d0.padding[1][0]+d0.padding[1][1])/t[1]-d0.padding[1][0]],A.positionRaw=[A.position[0]/t[0],A.position[1]/t[1]];let o=S0(e.keypoints.map(A=>A.position),t);return e.box=o.box,e.boxRaw=o.boxRaw,e}var f0,ut=0,N5=Number.MAX_SAFE_INTEGER,Ae={boxes:[],bodies:[],last:0};async function Pn(e){return R.initial&&(f0=null),f0?e.debug&&g("cached model:",f0.modelUrl):(dt(["size"],e),f0=await N(e.body.modelPath)),ut=f0.inputs[0].shape?f0.inputs[0].shape[2]:0,ut<64&&(ut=256),f0}async function u2(e,t,o){let A=e[0][0],n=[],s=0;for(let l=0;lt.body.minConfidence){let y=[A[l][1],A[l][0]];n.push({score:Math.round(100*s)/100,part:mt[l],positionRaw:y,position:[Math.round((o.shape[2]||0)*y[0]),Math.round((o.shape[1]||0)*y[1])]})}s=n.reduce((l,y)=>y.score>l?y.score:l,0);let a=[],i=S0(n.map(l=>l.position),[o.shape[2],o.shape[1]]),x={};for(let[l,y]of Object.entries(pt)){let c=[];for(let f=0;fv.part===y[f]),m=n.find(v=>v.part===y[f+1]);h&&m&&h.score>(t.body.minConfidence||0)&&m.score>(t.body.minConfidence||0)&&c.push([h.position,m.position])}x[l]=c}let d={id:0,score:s,box:i.box,boxRaw:i.boxRaw,keypoints:n,annotations:x};return O5(d),a.push(d),a}async function h2(e,t,o){let A=[];for(let n=0;nt.body.minConfidence){let i=[];for(let y=0;y<17;y++){let c=s[3*y+2];if(c>t.body.minConfidence){let f=[s[3*y+1],s[3*y+0]];i.push({part:mt[y],score:Math.round(100*c)/100,positionRaw:f,position:[Math.round((o.shape[2]||0)*f[0]),Math.round((o.shape[1]||0)*f[1])]})}}let x=S0(i.map(y=>y.position),[o.shape[2],o.shape[1]]),d={};for(let[y,c]of Object.entries(pt)){let f=[];for(let h=0;hb.part===c[h]),v=i.find(b=>b.part===c[h+1]);m&&v&&m.score>(t.body.minConfidence||0)&&v.score>(t.body.minConfidence||0)&&f.push([m.position,v.position])}d[y]=f}let l={id:n,score:a,box:x.box,boxRaw:x.boxRaw,keypoints:[...i],annotations:d};O5(l),A.push(l)}}return A.sort((n,s)=>s.score-n.score),A.length>t.body.maxDetected&&(A.length=t.body.maxDetected),A}async function L5(e,t){if(!f0||!(f0==null?void 0:f0.inputs[0].shape))return[];t.skipAllowed||(Ae.boxes.length=0),N5++;let 
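// movenet temporal caching: while within the configured skipTime/skipFrames budget,
// return previously detected bodies instead of re-running the model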
o=(t.body.skipTime||0)>M()-Ae.last,A=N5<(t.body.skipFrames||0);return t.skipAllowed&&o&&A?Ae.bodies:new Promise(async n=>{let s={};N5=0,s.input=gn(e,ut),s.res=f0==null?void 0:f0.execute(s.input),Ae.last=M();let a=await s.res.array();Ae.bodies=s.res.shape[2]===17?await u2(a,t,e):await h2(a,t,e);for(let i of Ae.bodies)Mn(i,[e.shape[2]||1,e.shape[1]||1]),bn(i.keypoints);Object.keys(s).forEach(i=>r.dispose(s[i])),n(Ae.bodies)})}var ge,ht=[],Tn=0,B5=Number.MAX_SAFE_INTEGER,gt=0,bt=2.5;async function Rn(e){if(!ge||R.initial){ge=await N(e.object.modelPath);let t=Object.values(ge.modelSignature.inputs);gt=Array.isArray(t)?parseInt(t[0].tensorShape.dim[2].size):0}else e.debug&&g("cached model:",ge.modelUrl);return ge}async function b2(e,t,o){let A=0,n=[];for(let x of[1,2,4])r.tidy(async()=>{let d=x*13,l=r.squeeze(e.find(m=>m.shape[1]===d**2&&(m.shape[2]||0)===xe.length)),y=r.squeeze(e.find(m=>m.shape[1]===d**2&&(m.shape[2]||0)(o.object.minConfidence||0)&&v!==61){let P=(.5+Math.trunc(m%d))/d,p=(.5+Math.trunc(m/d))/d,u=f[m].map(H=>H*(d/x/gt)),[E,W]=[P-bt/x*u[0],p-bt/x*u[1]],[C,S]=[P+bt/x*u[2]-E,p+bt/x*u[3]-W],O=[E,W,C,S];O=O.map(H=>Math.max(0,Math.min(H,1)));let V=[O[0]*t[0],O[1]*t[1],O[2]*t[0],O[3]*t[1]],B={id:A++,score:Math.round(100*b)/100,class:v+1,label:xe[v].label,box:V.map(H=>Math.trunc(H)),boxRaw:O};n.push(B)}}});e.forEach(x=>r.dispose(x));let s=n.map(x=>[x.boxRaw[1],x.boxRaw[0],x.boxRaw[3],x.boxRaw[2]]),a=n.map(x=>x.score),i=[];if(s&&s.length>0){let x=await r.image.nonMaxSuppressionAsync(s,a,o.object.maxDetected,o.object.iouThreshold,o.object.minConfidence);i=await x.data(),r.dispose(x)}return n=n.filter((x,d)=>i.includes(d)).sort((x,d)=>d.score-x.score),n}async function G5(e,t){let o=(t.object.skipTime||0)>M()-Tn,A=B5<(t.object.skipFrames||0);return t.skipAllowed&&o&&A&&ht.length>0?(B5++,ht):(B5=0,!R.kernels.includes("mod")||!R.kernels.includes("sparsetodense")?ht:new Promise(async n=>{let s=[e.shape[2]||0,e.shape[1]||0],a=r.image.resizeBilinear(e,[gt,gt],!1),i=r.div(a,L.tf255),x=i.transpose([0,3,1,2]);r.dispose(i),r.dispose(a);let d;t.object.enabled&&(d=ge.execute(x)),Tn=M(),r.dispose(x);let l=await b2(d,s,t);ht=l,n(l)}))}var Ge=["nose","leftEye","rightEye","leftEar","rightEar","leftShoulder","rightShoulder","leftElbow","rightElbow","leftWrist","rightWrist","leftHip","rightHip","leftKnee","rightKnee","leftAnkle","rightAnkle"],g2=Ge.length,Be=Ge.reduce((e,t,o)=>(e[t]=o,e),{}),M2=[["leftHip","leftShoulder"],["leftElbow","leftShoulder"],["leftElbow","leftWrist"],["leftHip","leftKnee"],["leftKnee","leftAnkle"],["rightHip","rightShoulder"],["rightElbow","rightShoulder"],["rightElbow","rightWrist"],["rightHip","rightKnee"],["rightKnee","rightAnkle"],["leftShoulder","rightShoulder"],["leftHip","rightHip"]],Ha=M2.map(([e,t])=>[Be[e],Be[t]]),kn=[["nose","leftEye"],["leftEye","leftEar"],["nose","rightEye"],["rightEye","rightEar"],["nose","leftShoulder"],["leftShoulder","leftElbow"],["leftElbow","leftWrist"],["leftShoulder","leftHip"],["leftHip","leftKnee"],["leftKnee","leftAnkle"],["nose","rightShoulder"],["rightShoulder","rightElbow"],["rightElbow","rightWrist"],["rightShoulder","rightHip"],["rightHip","rightKnee"],["rightKnee","rightAnkle"]];function En(e){let t=e.reduce(({maxX:o,maxY:A,minX:n,minY:s},{position:{x:a,y:i}})=>({maxX:Math.max(o,a),maxY:Math.max(A,i),minX:Math.min(n,a),minY:Math.min(s,i)}),{maxX:Number.NEGATIVE_INFINITY,maxY:Number.NEGATIVE_INFINITY,minX:Number.POSITIVE_INFINITY,minY:Number.POSITIVE_INFINITY});return[t.minX,t.minY,t.maxX-t.minX,t.maxY-t.minY]}function 
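// rescale posenet output from model input dimensions back to original image coordinates,
// producing box, boxRaw, and per-keypoint position/positionRaw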
zn(e,[t,o],[A,n]){let s=t/A,a=o/n,i=(d,l)=>({id:l,score:d.score,boxRaw:[d.box[0]/n,d.box[1]/A,d.box[2]/n,d.box[3]/A],box:[Math.trunc(d.box[0]*a),Math.trunc(d.box[1]*s),Math.trunc(d.box[2]*a),Math.trunc(d.box[3]*s)],keypoints:d.keypoints.map(({score:y,part:c,position:f})=>({score:y,part:c,position:[Math.trunc(f.x*a),Math.trunc(f.y*s)],positionRaw:[f.x/A,f.y/A]})),annotations:{}});return e.map((d,l)=>i(d,l))}var H5=class{constructor(t,o){w(this,"priorityQueue");w(this,"numberOfElements");w(this,"getElementValue");this.priorityQueue=new Array(t),this.numberOfElements=-1,this.getElementValue=o}enqueue(t){this.priorityQueue[++this.numberOfElements]=t,this.swim(this.numberOfElements)}dequeue(){let t=this.priorityQueue[0];return this.exchange(0,this.numberOfElements--),this.sink(0),this.priorityQueue[this.numberOfElements+1]=null,t}empty(){return this.numberOfElements===-1}size(){return this.numberOfElements+1}all(){return this.priorityQueue.slice(0,this.numberOfElements+1)}max(){return this.priorityQueue[0]}swim(t){for(;t>0&&this.less(Math.floor(t/2),t);)this.exchange(t,Math.floor(t/2)),t=Math.floor(t/2)}sink(t){for(;2*t<=this.numberOfElements;){let o=2*t;if(oo?o:e}function jn(e,t,o,A){let n=o-e,s=A-t;return n*n+s*s}function X5(e,t){return{x:e.x+t.x,y:e.y+t.y}}var v0,v2=["MobilenetV1/offset_2/BiasAdd","MobilenetV1/heatmap_2/BiasAdd","MobilenetV1/displacement_fwd_2/BiasAdd","MobilenetV1/displacement_bwd_2/BiasAdd"],Mt=1,Me=16,T2=50**2;function Sn(e,t,o,A,n,s,a=2){let i=b=>({y:s.get(b.y,b.x,e),x:s.get(b.y,b.x,s.shape[2]/2+e)}),x=(b,P,p)=>({y:Z5(Math.round(b.y/Me),0,P-1),x:Z5(Math.round(b.x/Me),0,p-1)}),[d,l]=A.shape,y=x(t.position,d,l),c=i(y),h=X5(t.position,c);for(let b=0;b[Be[c],Be[f]]),a=s.map(([,c])=>c),i=s.map(([c])=>c),x=t.shape[2],d=a.length,l=new Array(x),y=V5(e.part,Me,o);l[e.part.id]={score:e.score,part:Ge[e.part.id],position:y};for(let c=d-1;c>=0;--c){let f=a[c],h=i[c];l[f]&&!l[h]&&(l[h]=Sn(c,l[f],h,t,o,n))}for(let c=0;ct){i=!1;break}if(!i)break}return i}function k2(e,t){let[o,A,n]=t.shape,s=new H5(o*A*n,({score:a})=>a);for(let a=0;a{var a;let s=(a=n[A])==null?void 0:a.position;return s?jn(o,t,s.y,s.x)<=T2:!1})}function E2(e,t){return t.reduce((A,{position:n,score:s},a)=>(Wn(e,n,a)||(A+=s),A),0)/t.length}function z2(e,t,o,A,n,s){let a=[],i=k2(s,t);for(;a.lengthf.score>s);let y=E2(a,l),c=En(l);y>s&&a.push({keypoints:l,box:c,score:Math.round(100*y)/100})}return a}async function D5(e,t){let o=r.tidy(()=>{if(!v0.inputs[0].shape)return[];let a=r.image.resizeBilinear(e,[v0.inputs[0].shape[2],v0.inputs[0].shape[1]]),i=r.sub(r.div(r.cast(a,"float32"),127.5),1),d=v0.execute(i,v2).map(l=>r.squeeze(l,[0]));return d[1]=r.sigmoid(d[1]),d}),A=await Promise.all(o.map(a=>a.buffer()));for(let a of o)r.dispose(a);let n=await z2(A[0],A[1],A[2],A[3],t.body.maxDetected,t.body.minConfidence);return v0.inputs[0].shape?zn(n,[e.shape[1],e.shape[2]],[v0.inputs[0].shape[2],v0.inputs[0].shape[1]]):[]}async function Cn(e){return!v0||R.initial?v0=await N(e.body.modelPath):e.debug&&g("cached model:",v0.modelUrl),v0}var w0,q5=!1;async function U5(e){return!w0||R.initial?w0=await N(e.segmentation.modelPath):e.debug&&g("cached model:",w0.modelUrl),w0}async function On(e,t,o){var m,v;if(q5)return{data:[],canvas:null,alpha:null};q5=!0,w0||await U5(o);let A=await ie(e,o),n=((m=A.tensor)==null?void 0:m.shape[2])||0,s=((v=A.tensor)==null?void 0:v.shape[1])||0;if(!A.tensor)return{data:[],canvas:null,alpha:null};let 
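// segmentation: resize and 0..1-normalize the input, run the model, derive an alpha mask
// (softmax over two channels when present), then composite it onto the source canvas with optional blur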
a={};a.resize=r.image.resizeBilinear(A.tensor,[w0.inputs[0].shape?w0.inputs[0].shape[1]:0,w0.inputs[0].shape?w0.inputs[0].shape[2]:0],!1),r.dispose(A.tensor),a.norm=r.div(a.resize,L.tf255),a.res=w0.execute(a.norm),a.squeeze=r.squeeze(a.res,0),a.squeeze.shape[2]===2?(a.softmax=r.softmax(a.squeeze),[a.bg,a.fg]=r.unstack(a.softmax,2),a.expand=r.expandDims(a.fg,2),a.pad=r.expandDims(a.expand,0),a.crop=r.image.cropAndResize(a.pad,[[0,0,.5,.5]],[0],[n,s]),a.data=r.squeeze(a.crop,0)):a.data=r.image.resizeBilinear(a.squeeze,[s,n]);let i=Array.from(await a.data.data());if(R.node&&!R.Canvas&&typeof ImageData=="undefined")return o.debug&&g("canvas support missing"),Object.keys(a).forEach(b=>r.dispose(a[b])),{data:i,canvas:null,alpha:null};let x=s0(n,s);r.browser&&await r.browser.toPixels(a.data,x);let d=x.getContext("2d");o.segmentation.blur&&o.segmentation.blur>0&&(d.filter=`blur(${o.segmentation.blur}px)`);let l=d.getImageData(0,0,n,s),y=s0(n,s),c=y.getContext("2d");A.canvas&&c.drawImage(A.canvas,0,0),c.globalCompositeOperation="darken",o.segmentation.blur&&o.segmentation.blur>0&&(c.filter=`blur(${o.segmentation.blur}px)`),c.drawImage(x,0,0),c.globalCompositeOperation="source-over",c.filter="none";let f=c.getImageData(0,0,n,s);for(let b=0;br.dispose(a[b])),q5=!1,{data:i,canvas:y,alpha:x}}var J5=class{constructor(){w(this,"ssrnetage",null);w(this,"gear",null);w(this,"blazeposedetect",null);w(this,"blazepose",null);w(this,"centernet",null);w(this,"efficientpose",null);w(this,"mobilefacenet",null);w(this,"emotion",null);w(this,"facedetect",null);w(this,"faceiris",null);w(this,"facemesh",null);w(this,"faceres",null);w(this,"ssrnetgender",null);w(this,"handpose",null);w(this,"handskeleton",null);w(this,"handtrack",null);w(this,"liveness",null);w(this,"movenet",null);w(this,"nanodet",null);w(this,"posenet",null);w(this,"segmentation",null);w(this,"antispoof",null)}};function Y5(e){for(let t of Object.keys(e.models))e.models[t]=null}async function Ln(e){var t,o,A,n,s,a,i,x,d,l,y,c,f,h,m,v,b,P,p,u,E,W,C,S,O,V,B,H,t0,z;R.initial&&Y5(e),e.config.hand.enabled&&(!e.models.handpose&&((o=(t=e.config.hand.detector)==null?void 0:t.modelPath)==null?void 0:o.includes("handdetect"))&&([e.models.handpose,e.models.handskeleton]=await w5(e.config)),!e.models.handskeleton&&e.config.hand.landmarks&&((n=(A=e.config.hand.detector)==null?void 0:A.modelPath)==null?void 0:n.includes("handdetect"))&&([e.models.handpose,e.models.handskeleton]=await w5(e.config))),e.config.body.enabled&&!e.models.blazepose&&((a=(s=e.config.body)==null?void 0:s.modelPath)==null?void 0:a.includes("blazepose"))&&(e.models.blazepose=lA(e.config)),e.config.body.enabled&&!e.models.blazeposedetect&&e.config.body.detector&&e.config.body.detector.modelPath&&(e.models.blazeposedetect=iA(e.config)),e.config.body.enabled&&!e.models.efficientpose&&((x=(i=e.config.body)==null?void 0:i.modelPath)==null?void 0:x.includes("efficientpose"))&&(e.models.efficientpose=mA(e.config)),e.config.body.enabled&&!e.models.movenet&&((l=(d=e.config.body)==null?void 0:d.modelPath)==null?void 0:l.includes("movenet"))&&(e.models.movenet=Pn(e.config)),e.config.body.enabled&&!e.models.posenet&&((c=(y=e.config.body)==null?void 0:y.modelPath)==null?void 0:c.includes("posenet"))&&(e.models.posenet=Cn(e.config)),e.config.face.enabled&&!e.models.facedetect&&(e.models.facedetect=$o(e.config)),e.config.face.enabled&&((f=e.config.face.antispoof)==null?void 0:f.enabled)&&!e.models.antispoof&&(e.models.antispoof=Ho(e.config)),e.config.face.enabled&&((h=e.config.face.liveness)==null?void 
0:h.enabled)&&!e.models.liveness&&(e.models.liveness=pn(e.config)),e.config.face.enabled&&((m=e.config.face.description)==null?void 0:m.enabled)&&!e.models.faceres&&(e.models.faceres=BA(e.config)),e.config.face.enabled&&((v=e.config.face.emotion)==null?void 0:v.enabled)&&!e.models.emotion&&(e.models.emotion=bA(e.config)),e.config.face.enabled&&((b=e.config.face.iris)==null?void 0:b.enabled)&&!e.models.faceiris&&(e.models.faceiris=zA(e.config)),e.config.face.enabled&&((P=e.config.face.mesh)==null?void 0:P.enabled)&&!e.models.facemesh&&(e.models.facemesh=CA(e.config)),e.config.face.enabled&&((p=e.config.face.gear)==null?void 0:p.enabled)&&!e.models.gear&&(e.models.gear=ko(e.config)),e.config.face.enabled&&((u=e.config.face.ssrnet)==null?void 0:u.enabled)&&!e.models.ssrnetage&&(e.models.ssrnetage=Wo(e.config)),e.config.face.enabled&&((E=e.config.face.ssrnet)==null?void 0:E.enabled)&&!e.models.ssrnetgender&&(e.models.ssrnetgender=No(e.config)),e.config.face.enabled&&((W=e.config.face.mobilefacenet)==null?void 0:W.enabled)&&!e.models.mobilefacenet&&(e.models.mobilefacenet=TA(e.config)),e.config.hand.enabled&&!e.models.handtrack&&((S=(C=e.config.hand.detector)==null?void 0:C.modelPath)==null?void 0:S.includes("handtrack"))&&(e.models.handtrack=xn(e.config)),e.config.hand.enabled&&e.config.hand.landmarks&&!e.models.handskeleton&&((V=(O=e.config.hand.detector)==null?void 0:O.modelPath)==null?void 0:V.includes("handtrack"))&&(e.models.handskeleton=cn(e.config)),e.config.object.enabled&&!e.models.centernet&&((H=(B=e.config.object)==null?void 0:B.modelPath)==null?void 0:H.includes("centernet"))&&(e.models.centernet=cA(e.config)),e.config.object.enabled&&!e.models.nanodet&&((z=(t0=e.config.object)==null?void 0:t0.modelPath)==null?void 0:z.includes("nanodet"))&&(e.models.nanodet=Rn(e.config)),e.config.segmentation.enabled&&!e.models.segmentation&&(e.models.segmentation=U5(e.config));for await(let m0 of Object.keys(e.models))e.models[m0]&&typeof e.models[m0]!="undefined"&&(e.models[m0]=await e.models[m0])}async function Bn(e){let t=["const","placeholder","noop","pad","squeeze","add","sub","mul","div"];for(let o of Object.keys(e.models)){let A=e.models[o];if(!A)continue;let n=[],s=A==null?void 0:A.executor;if(s&&s.graph.nodes)for(let i of Object.values(s.graph.nodes)){let x=i.op.toLowerCase();n.includes(x)||n.push(x)}else!s&&e.config.debug&&g("model signature not determined:",o);let a=[];for(let i of n)!t.includes(i)&&!e.env.kernels.includes(i)&&!e.env.kernels.includes(i.replace("_",""))&&!e.env.kernels.includes(i.replace("native",""))&&!e.env.kernels.includes(i.replace("v2",""))&&a.push(i);e.config.debug&&a.length>0&&g("model validation failed:",o,a)}}var q={name:"humangl",priority:999,canvas:null,gl:null,extensions:[],webGLattr:{alpha:!1,antialias:!1,premultipliedAlpha:!1,preserveDrawingBuffer:!1,depth:!1,stencil:!1,failIfMajorPerformanceCaveat:!1,desynchronized:!0}};function j2(){let e=q.gl;!e||(q.extensions=e.getSupportedExtensions())}async function Hn(e){var t;if(e.config.backend==="humangl"&&(q.name in r.engine().registry&&(!q.gl||!q.gl.getParameter(q.gl.VERSION))&&(g("error: humangl backend invalid context"),Y5(e)),!r.findBackend(q.name))){try{q.canvas=await s0(100,100)}catch(A){g("error: cannot create canvas:",A);return}try{if(q.gl=(t=q.canvas)==null?void 0:t.getContext("webgl2",q.webGLattr),!q.gl.getParameter(q.gl.VERSION).includes("2.0")){g("override: using fallback webgl backend as webgl 2.0 is not 
detected"),e.config.backend="webgl";return}q.canvas&&(q.canvas.addEventListener("webglcontextlost",async n=>{throw g("error: humangl:",n.type),g("possible browser memory leak using webgl or conflict with multiple backend registrations"),e.emit("error"),new Error("backend error: webgl context lost")}),q.canvas.addEventListener("webglcontextrestored",n=>{g("error: humangl context restored:",n)}),q.canvas.addEventListener("webglcontextcreationerror",n=>{g("error: humangl context create:",n)}))}catch(A){g("error: cannot get WebGL context:",A);return}try{r.setWebGLContext(2,q.gl)}catch(A){g("error: cannot set WebGL context:",A);return}try{let A=new r.GPGPUContext(q.gl);r.registerBackend(q.name,()=>new r.MathBackendWebGL(A),q.priority)}catch(A){g("error: cannot register WebGL backend:",A);return}try{r.getKernelsForBackend("webgl").forEach(n=>{let s={...n,backendName:q.name};r.registerKernel(s)})}catch(A){g("error: cannot update WebGL backend registration:",A);return}let o=r.backend().getGPGPUContext?r.backend().getGPGPUContext().gl:null;if(o)g(`humangl webgl version:${o.getParameter(o.VERSION)} renderer:${o.getParameter(o.RENDERER)}`);else{g("error: no current gl context:",o,q.gl);return}try{r.ENV.set("WEBGL_VERSION",2)}catch(A){g("error: cannot set WebGL backend flags:",A);return}j2(),g("backend registered:",q.name)}}function S2(){if(!R.kernels.includes("mod")){let e={kernelName:"Mod",backendName:r.getBackend(),kernelFunc:t=>r.tidy(()=>r.sub(t.inputs.a,r.mul(r.div(t.inputs.a,t.inputs.b),t.inputs.b)))};r.registerKernel(e),R.kernels.push("mod")}if(!R.kernels.includes("floormod")){let e={kernelName:"FloorMod",backendName:r.getBackend(),kernelFunc:t=>r.tidy(()=>r.floorDiv(t.inputs.a/t.inputs.b)*t.inputs.b+r.mod(t.inputs.a,t.inputs.b))};r.registerKernel(e),R.kernels.push("floormod")}}async function Pt(e,t=!1){if(e.state="backend",t||R.initial||e.config.backend&&e.config.backend.length>0&&r.getBackend()!==e.config.backend){let o=M();if(e.config.backend&&e.config.backend.length>0){if(typeof window=="undefined"&&typeof WorkerGlobalScope!="undefined"&&e.config.debug&&e.config.debug&&g("running inside web worker"),R.browser&&e.config.backend==="tensorflow"&&(e.config.debug&&g("override: backend set to tensorflow while running in browser"),e.config.backend="humangl"),R.node&&(e.config.backend==="webgl"||e.config.backend==="humangl")&&(e.config.debug&&g(`override: backend set to ${e.config.backend} while running in nodejs`),e.config.backend="tensorflow"),R.browser&&e.config.backend==="webgpu")if(typeof navigator=="undefined"||typeof navigator.gpu=="undefined")g("override: backend set to webgpu but browser does not support webgpu"),e.config.backend="humangl";else{let n=await navigator.gpu.requestAdapter();e.config.debug&&g("enumerated webgpu adapter:",n)}e.config.backend==="humangl"&&await Hn(e);let A=Object.keys(r.engine().registryFactory);if(e.config.debug&&g("available backends:",A),A.includes(e.config.backend)||(g(`error: backend ${e.config.backend} not found in registry`),e.config.backend=R.node?"tensorflow":"webgl",e.config.debug&&g(`override: setting backend ${e.config.backend}`)),e.config.debug&&g("setting backend:",e.config.backend),e.config.backend==="wasm"){if(e.config.debug&&g("wasm path:",e.config.wasmPath),typeof(r==null?void 0:r.setWasmPaths)!="undefined")await r.setWasmPaths(e.config.wasmPath);else throw new Error("backend error: attempting to use wasm backend but wasm path is not set");let n=await r.env().getAsync("WASM_HAS_SIMD_SUPPORT"),s=await 
r.env().getAsync("WASM_HAS_MULTITHREAD_SUPPORT");e.config.debug&&g(`wasm execution: ${n?"SIMD":"no SIMD"} ${s?"multithreaded":"singlethreaded"}`),e.config.debug&&!n&&g("warning: wasm simd support is not enabled")}try{await r.setBackend(e.config.backend),await r.ready(),zo()}catch(n){return g("error: cannot set backend:",e.config.backend,n),!1}}if(r.getBackend()==="humangl"&&(r.ENV.set("CHECK_COMPUTATION_FOR_ERRORS",!1),r.ENV.set("WEBGL_CPU_FORWARD",!0),r.ENV.set("WEBGL_USE_SHAPES_UNIFORMS",!0),r.ENV.set("CPU_HANDOFF_SIZE_THRESHOLD",256),typeof e.config.deallocate!="undefined"&&e.config.deallocate&&(g("changing webgl: WEBGL_DELETE_TEXTURE_THRESHOLD:",!0),r.ENV.set("WEBGL_DELETE_TEXTURE_THRESHOLD",0)),r.backend().getGPGPUContext)){let A=await r.backend().getGPGPUContext().gl;e.config.debug&&g(`gl version:${A.getParameter(A.VERSION)} renderer:${A.getParameter(A.RENDERER)}`)}r.getBackend()==="webgpu",r.enableProdMode(),await r.ready(),e.performance.initBackend=Math.trunc(M()-o),e.config.backend=r.getBackend(),await R.updateBackend(),S2()}return!0}function dt(e,t){for(let o of e){let A={kernelName:o,backendName:t.backend,kernelFunc:()=>{t.debug&&g("kernelFunc",o,t.backend)}};r.registerKernel(A)}R.kernels=r.getKernelsForBackend(r.getBackend()).map(o=>o.kernelName.toLowerCase())}var B0={color:"rgba(173, 216, 230, 0.6)",labelColor:"rgba(173, 216, 230, 1)",shadowColor:"black",font:'small-caps 16px "Segoe UI"',lineHeight:18,lineWidth:4,pointSize:2,roundRect:8,drawPoints:!1,drawLabels:!0,drawBoxes:!0,drawGestures:!0,drawPolygons:!0,drawGaze:!0,fillPolygons:!1,useDepth:!0,useCurves:!1},K5=0,ne=e=>{if(!e)g("draw error: invalid canvas");else if(!e.getContext)g("draw error: canvas context not defined");else{let t=e.getContext("2d");if(!t)g("draw error: cannot get canvas context");else return t}return null},Pe=e=>Math.round(e*180/Math.PI);function Q5(e,t,o,A,n){A=A||0,e.fillStyle=n.useDepth&&A?`rgba(${127.5+2*A}, ${127.5-2*A}, 255, 0.3)`:n.color,e.beginPath(),e.arc(t,o,n.pointSize,0,2*Math.PI),e.fill()}function He(e,t,o,A,n,s){if(e.beginPath(),e.lineWidth=s.lineWidth,s.useCurves){let a=(t+t+A)/2,i=(o+o+n)/2;e.ellipse(a,i,A/2,n/2,0,0,2*Math.PI)}else e.moveTo(t+s.roundRect,o),e.lineTo(t+A-s.roundRect,o),e.quadraticCurveTo(t+A,o,t+A,o+s.roundRect),e.lineTo(t+A,o+n-s.roundRect),e.quadraticCurveTo(t+A,o+n,t+A-s.roundRect,o+n),e.lineTo(t+s.roundRect,o+n),e.quadraticCurveTo(t,o+n,t,o+n-s.roundRect),e.lineTo(t,o+s.roundRect),e.quadraticCurveTo(t,o,t+s.roundRect,o),e.closePath();e.stroke()}function Zn(e,t,o){if(!(t.length<2)){e.beginPath(),e.moveTo(t[0][0],t[0][1]);for(let A of t){let n=A[2]||0;e.strokeStyle=o.useDepth&&n!==0?`rgba(${127.5+2*n}, ${127.5-2*n}, 255, 0.3)`:o.color,e.fillStyle=o.useDepth&&n!==0?`rgba(${127.5+2*n}, ${127.5-2*n}, 255, 0.3)`:o.color,e.lineTo(A[0],Math.round(A[1]))}e.stroke(),o.fillPolygons&&(e.closePath(),e.fill())}}function C2(e,t,o){if(!(t.length<2)){if(e.lineWidth=o.lineWidth,!o.useCurves||t.length<=2){Zn(e,t,o);return}e.moveTo(t[0][0],t[0][1]);for(let A=0;A1&&x[1].length>0){let d=i[1]>0?`#${i[1]}`:"",l=`${i[0]} ${d}: ${x[1]}`;A.shadowColor&&A.shadowColor!==""&&(n.fillStyle=A.shadowColor,n.fillText(l,8,2+s*A.lineHeight)),n.fillStyle=A.labelColor,n.fillText(l,6,0+s*A.lineHeight),s+=1}}}}async function $5(e,t,o){var s,a,i,x,d;let A=o0(B0,o);if(!t||!e)return;let n=ne(e);if(!!n)for(let l of t){if(n.font=A.font,n.strokeStyle=A.color,n.fillStyle=A.color,A.drawBoxes&&He(n,l.box[0],l.box[1],l.box[2],l.box[3],A),A.drawLabels){let y=[];if(y.push(`face: 
${Math.trunc(100*l.score)}%`),l.genderScore&&y.push(`${l.gender||""} ${Math.trunc(100*l.genderScore)}%`),l.age&&y.push(`age: ${l.age||""}`),l.iris&&y.push(`distance: ${l.iris}`),l.real&&y.push(`real: ${Math.trunc(100*l.real)}%`),l.live&&y.push(`live: ${Math.trunc(100*l.live)}%`),l.emotion&&l.emotion.length>0){let c=l.emotion.map(f=>`${Math.trunc(100*f.score)}% ${f.emotion}`);c.length>3&&(c.length=3),y.push(c.join(" "))}l.rotation&&l.rotation.angle&&l.rotation.gaze&&(l.rotation.angle.roll&&y.push(`roll: ${Pe(l.rotation.angle.roll)}\xB0 yaw:${Pe(l.rotation.angle.yaw)}\xB0 pitch:${Pe(l.rotation.angle.pitch)}\xB0`),l.rotation.gaze.bearing&&y.push(`gaze: ${Pe(l.rotation.gaze.bearing)}\xB0`)),y.length===0&&y.push("face"),n.fillStyle=A.color;for(let c=y.length-1;c>=0;c--){let f=Math.max(l.box[0],0),h=c*A.lineHeight+l.box[1];A.shadowColor&&A.shadowColor!==""&&(n.fillStyle=A.shadowColor,n.fillText(y[c],f+5,h+16)),n.fillStyle=A.labelColor,n.fillText(y[c],f+4,h+15)}}if(n.lineWidth=2,l.mesh&&l.mesh.length>0){if(A.drawPoints)for(let y of l.mesh)Q5(n,y[0],y[1],y[2],A);if(A.drawPolygons){if(l.mesh.length>450)for(let y=0;yl.mesh[f]);Zn(n,c,A)}if(l.annotations&&l.annotations.leftEyeIris&&l.annotations.leftEyeIris[0]){n.strokeStyle=A.useDepth?"rgba(255, 200, 255, 0.3)":A.color,n.beginPath();let y=Math.abs(l.annotations.leftEyeIris[3][0]-l.annotations.leftEyeIris[1][0])/2,c=Math.abs(l.annotations.leftEyeIris[4][1]-l.annotations.leftEyeIris[2][1])/2;n.ellipse(l.annotations.leftEyeIris[0][0],l.annotations.leftEyeIris[0][1],y,c,0,0,2*Math.PI),n.stroke(),A.fillPolygons&&(n.fillStyle=A.useDepth?"rgba(255, 255, 200, 0.3)":A.color,n.fill())}if(l.annotations&&l.annotations.rightEyeIris&&l.annotations.rightEyeIris[0]){n.strokeStyle=A.useDepth?"rgba(255, 200, 255, 0.3)":A.color,n.beginPath();let y=Math.abs(l.annotations.rightEyeIris[3][0]-l.annotations.rightEyeIris[1][0])/2,c=Math.abs(l.annotations.rightEyeIris[4][1]-l.annotations.rightEyeIris[2][1])/2;n.ellipse(l.annotations.rightEyeIris[0][0],l.annotations.rightEyeIris[0][1],y,c,0,0,2*Math.PI),n.stroke(),A.fillPolygons&&(n.fillStyle=A.useDepth?"rgba(255, 255, 200, 0.3)":A.color,n.fill())}if(A.drawGaze&&((s=l.rotation)==null?void 0:s.angle)&&typeof Path2D!="undefined"){n.strokeStyle="pink";let y=l.box[0]+l.box[2]/2-l.box[3]*Pe(l.rotation.angle.yaw)/90,c=l.box[1]+l.box[3]/2+l.box[2]*Pe(l.rotation.angle.pitch)/90,f=new Path2D(` + M ${l.box[0]+l.box[2]/2} ${l.box[1]} + C + ${y} ${l.box[1]}, + ${y} ${l.box[1]+l.box[3]}, + ${l.box[0]+l.box[2]/2} ${l.box[1]+l.box[3]} + `),h=new Path2D(` + M ${l.box[0]} ${l.box[1]+l.box[3]/2} + C + ${l.box[0]} ${c}, + ${l.box[0]+l.box[2]} ${c}, + ${l.box[0]+l.box[2]} ${l.box[1]+l.box[3]/2} + `);n.stroke(h),n.stroke(f)}if(A.drawGaze&&((i=(a=l.rotation)==null?void 0:a.gaze)==null?void 0:i.strength)&&((d=(x=l.rotation)==null?void 0:x.gaze)==null?void 0:d.bearing)&&l.annotations.leftEyeIris&&l.annotations.rightEyeIris&&l.annotations.leftEyeIris[0]&&l.annotations.rightEyeIris[0]){n.strokeStyle="pink",n.fillStyle="pink";let y=[l.annotations.leftEyeIris[0][0]+Math.sin(l.rotation.gaze.bearing)*l.rotation.gaze.strength*l.box[3],l.annotations.leftEyeIris[0][1]+Math.cos(l.rotation.gaze.bearing)*l.rotation.gaze.strength*l.box[2]];Vn(n,[l.annotations.leftEyeIris[0][0],l.annotations.leftEyeIris[0][1]],[y[0],y[1]],4);let 
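// mirror the gaze arrow for the right iris using the same bearing/strength projection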
c=[l.annotations.rightEyeIris[0][0]+Math.sin(l.rotation.gaze.bearing)*l.rotation.gaze.strength*l.box[3],l.annotations.rightEyeIris[0][1]+Math.cos(l.rotation.gaze.bearing)*l.rotation.gaze.strength*l.box[2]];Vn(n,[l.annotations.rightEyeIris[0][0],l.annotations.rightEyeIris[0][1]],[c[0],c[1]],4)}}}}}async function eo(e,t,o){var s;let A=o0(B0,o);if(!t||!e)return;let n=ne(e);if(!!n){n.lineJoin="round";for(let a=0;a0)for(let a of s.keypoints)n.fillStyle=A.useDepth?`rgba(${127.5+2*(a[2]||0)}, ${127.5-2*(a[2]||0)}, 255, 0.5)`:A.color,Q5(n,a[0],a[1],0,A);if(A.drawLabels&&s.annotations){let a=(i,x)=>{if(!i||i.length===0||!i[0])return;let d=i[i.length-1][2]||0;n.fillStyle=A.useDepth?`rgba(${127.5+2*d}, ${127.5-2*d}, 255, 0.5)`:A.color,n.fillText(x,i[i.length-1][0]+4,i[i.length-1][1]+4)};n.font=A.font,a(s.annotations.index,"index"),a(s.annotations.middle,"middle"),a(s.annotations.ring,"ring"),a(s.annotations.pinky,"pinky"),a(s.annotations.thumb,"thumb"),a(s.annotations.palm,"palm")}if(A.drawPolygons&&s.annotations){let a=i=>{if(!(!i||i.length===0||!i[0]))for(let x=0;x0?x-1:0][0],i[x>0?x-1:0][1]),n.lineTo(i[x][0],i[x][1]),n.stroke()}};n.lineWidth=A.lineWidth,a(s.annotations.index),a(s.annotations.middle),a(s.annotations.ring),a(s.annotations.pinky),a(s.annotations.thumb)}}}}async function oo(e,t,o){let A=o0(B0,o);if(!t||!e)return;let n=ne(e);if(!!n){n.lineJoin="round",n.font=A.font;for(let s of t)if(A.drawBoxes){if(n.strokeStyle=A.color,n.fillStyle=A.color,He(n,s.box[0],s.box[1],s.box[2],s.box[3],A),A.drawLabels){let a=`${s.label} ${Math.round(100*s.score)}%`;A.shadowColor&&A.shadowColor!==""&&(n.fillStyle=A.shadowColor,n.fillText(a,s.box[0]+3,1+s.box[1]+A.lineHeight,s.box[2])),n.fillStyle=A.labelColor,n.fillText(a,s.box[0]+2,0+s.box[1]+A.lineHeight,s.box[2])}n.stroke()}}}async function Xn(e,t,o){let A=o0(B0,o);if(!t||!e)return;let n=ne(e);if(!!n){n.lineJoin="round",n.font=A.font;for(let s=0;st!=o[n].y>t&&e<(o[n].x-o[s].x)*(t-o[s].y)/(o[n].y-o[s].y)+o[s].x&&(A=!A);return A}async function Un(e){if(!e.tensor||!e.mesh||e.mesh.length<100)return e.tensor;let t=e.tensor.shape[2]||0,o=e.tensor.shape[1]||0,A=await e.tensor.buffer(),n=[];for(let a of b0.silhouette)n.push({x:(e.mesh[a][0]-e.box[0])/e.box[2],y:(e.mesh[a][1]-e.box[1])/e.box[3]});ve&&ve>0&&(n=n.map(a=>({x:a.x>.5?a.x+ve:a.x-ve,y:a.y>.5?a.y+ve:a.y-ve})));for(let a=0;a{let t=(y,c)=>Math.atan2(y[1]-c[1],y[0]-c[0]);if(!e.annotations.rightEyeIris||!e.annotations.leftEyeIris)return{bearing:0,strength:0};let o=[0,-.1],A=1,n=(e.mesh[33][2]||0)>(e.mesh[263][2]||0),s=n?e.mesh[473]:e.mesh[468],a=n?[(e.mesh[133][0]+e.mesh[33][0])/2,(e.mesh[133][1]+e.mesh[33][1])/2]:[(e.mesh[263][0]+e.mesh[362][0])/2,(e.mesh[263][1]+e.mesh[362][1])/2],i=n?[e.mesh[133][0]-e.mesh[33][0],e.mesh[23][1]-e.mesh[27][1]]:[e.mesh[263][0]-e.mesh[362][0],e.mesh[253][1]-e.mesh[257][1]],x=[(a[0]-s[0])/i[0]-o[0],A*(s[1]-a[1])/i[1]-o[1]],d=Math.sqrt(x[0]**2+x[1]**2);return d=Math.min(d,e.boxRaw[2]/2,e.boxRaw[3]/2),{bearing:(t([0,0],x)+Math.PI/2)%Math.PI,strength:d}},Jn=(e,t)=>{let o=m=>{let v=Math.sqrt(m[0]*m[0]+m[1]*m[1]+m[2]*m[2]);return m[0]/=v,m[1]/=v,m[2]/=v,m},A=(m,v)=>{let b=m[0]-v[0],P=m[1]-v[1],p=m[2]-v[2];return[b,P,p]},n=(m,v)=>{let b=m[1]*v[2]-m[2]*v[1],P=m[2]*v[0]-m[0]*v[2],p=m[0]*v[1]-m[1]*v[0];return[b,P,p]},s=m=>{let[v,b,P,p,u,E,W,C,S]=m,O,V,B;return 
p<1?p>-1?(B=Math.asin(p),V=Math.atan2(-W,v),O=Math.atan2(-E,u)):(B=-Math.PI/2,V=-Math.atan2(C,S),O=0):(B=Math.PI/2,V=Math.atan2(C,S),O=0),isNaN(O)&&(O=0),isNaN(V)&&(V=0),isNaN(B)&&(B=0),{pitch:2*-O,yaw:2*-V,roll:2*-B}},a=e.meshRaw;if(!a||a.length<300)return{angle:{pitch:0,yaw:0,roll:0},matrix:[1,0,0,0,1,0,0,0,1],gaze:{bearing:0,strength:0}};let i=Math.max(e.boxRaw[2]*t[0],e.boxRaw[3]*t[1])/1.5,x=[a[10],a[152],a[234],a[454]].map(m=>[m[0]*t[0]/i,m[1]*t[1]/i,m[2]]),d=o(A(x[1],x[0])),l=o(A(x[3],x[2])),y=o(n(l,d));l=n(d,y);let c=[l[0],l[1],l[2],d[0],d[1],d[2],y[0],y[1],y[2]],f=s(c),h=a.length===478?L2(e):{bearing:0,strength:0};return{angle:f,matrix:c,gaze:h}};var no=async(e,t)=>{var f,h,m,v,b,P,p,u,E,W,C,S,O,V,B,H,t0,z,m0,G0,T,Q;let o=M(),A,n,s,a,i,x,d,l,y=[];e.state="run:face";let c=await WA(t,e.config);if(e.performance.face=R.perfadd?(e.performance.face||0)+Math.trunc(M()-o):Math.trunc(M()-o),!t.shape||t.shape.length!==4)return[];if(!c)return[];for(let k=0;k200?Jn(c[k],[t.shape[2],t.shape[1]]):null;e.analyze("Start Emotion:"),e.config.async?a=((h=e.config.face.emotion)==null?void 0:h.enabled)?l5(c[k].tensor||r.tensor([]),e.config,k,c.length):[]:(e.state="run:emotion",o=M(),a=((m=e.config.face.emotion)==null?void 0:m.enabled)?await l5(c[k].tensor||r.tensor([]),e.config,k,c.length):[],e.performance.emotion=R.perfadd?(e.performance.emotion||0)+Math.trunc(M()-o):Math.trunc(M()-o)),e.analyze("End Emotion:"),e.analyze("Start AntiSpoof:"),e.config.async?x=((v=e.config.face.antispoof)==null?void 0:v.enabled)?Vt(c[k].tensor||r.tensor([]),e.config,k,c.length):0:(e.state="run:antispoof",o=M(),x=((b=e.config.face.antispoof)==null?void 0:b.enabled)?await Vt(c[k].tensor||r.tensor([]),e.config,k,c.length):0,e.performance.antispoof=R.perfadd?(e.performance.antispoof||0)+Math.trunc(M()-o):Math.trunc(M()-o)),e.analyze("End AntiSpoof:"),e.analyze("Start Liveness:"),e.config.async?d=((P=e.config.face.liveness)==null?void 0:P.enabled)?S5(c[k].tensor||r.tensor([]),e.config,k,c.length):0:(e.state="run:liveness",o=M(),d=((p=e.config.face.liveness)==null?void 0:p.enabled)?await S5(c[k].tensor||r.tensor([]),e.config,k,c.length):0,e.performance.liveness=R.perfadd?(e.performance.antispoof||0)+Math.trunc(M()-o):Math.trunc(M()-o)),e.analyze("End Liveness:"),e.analyze("Start GEAR:"),e.config.async?n=((u=e.config.face.gear)==null?void 0:u.enabled)?Ot(c[k].tensor||r.tensor([]),e.config,k,c.length):null:(e.state="run:gear",o=M(),n=((E=e.config.face.gear)==null?void 0:E.enabled)?await Ot(c[k].tensor||r.tensor([]),e.config,k,c.length):null,e.performance.gear=Math.trunc(M()-o)),e.analyze("End GEAR:"),e.analyze("Start SSRNet:"),e.config.async?(A=((W=e.config.face.ssrnet)==null?void 0:W.enabled)?Lt(c[k].tensor||r.tensor([]),e.config,k,c.length):null,s=((C=e.config.face.ssrnet)==null?void 0:C.enabled)?Ht(c[k].tensor||r.tensor([]),e.config,k,c.length):null):(e.state="run:ssrnet",o=M(),A=((S=e.config.face.ssrnet)==null?void 0:S.enabled)?await Lt(c[k].tensor||r.tensor([]),e.config,k,c.length):null,s=((O=e.config.face.ssrnet)==null?void 0:O.enabled)?await Ht(c[k].tensor||r.tensor([]),e.config,k,c.length):null,e.performance.ssrnet=Math.trunc(M()-o)),e.analyze("End SSRNet:"),e.analyze("Start MobileFaceNet:"),e.config.async?i=((V=e.config.face.mobilefacenet)==null?void 0:V.enabled)?x5(c[k].tensor||r.tensor([]),e.config,k,c.length):null:(e.state="run:mobilefacenet",o=M(),i=((B=e.config.face.mobilefacenet)==null?void 0:B.enabled)?await 
x5(c[k].tensor||r.tensor([]),e.config,k,c.length):null,e.performance.mobilefacenet=Math.trunc(M()-o)),e.analyze("End MobileFaceNet:"),e.analyze("Start Description:"),e.config.async?l=((H=e.config.face.description)==null?void 0:H.enabled)?u5(c[k].tensor||r.tensor([]),e.config,k,c.length):null:(e.state="run:description",o=M(),l=((t0=e.config.face.description)==null?void 0:t0.enabled)?await u5(c[k].tensor||r.tensor([]),e.config,k,c.length):null,e.performance.description=R.perfadd?(e.performance.description||0)+Math.trunc(M()-o):Math.trunc(M()-o)),e.analyze("End Description:"),e.config.async&&([A,s,a,i,l,n,x,d]=await Promise.all([A,s,a,i,l,n,x,d])),e.analyze("Finish Face:"),((z=e.config.face.ssrnet)==null?void 0:z.enabled)&&A&&s&&(l={...l,age:A.age,gender:s.gender,genderScore:s.genderScore}),((m0=e.config.face.gear)==null?void 0:m0.enabled)&&n&&(l={...l,age:n.age,gender:n.gender,genderScore:n.genderScore,race:n.race}),((G0=e.config.face.mobilefacenet)==null?void 0:G0.enabled)&&i&&(l.descriptor=i),!((T=e.config.face.iris)==null?void 0:T.enabled);let Y=c[k].annotations&&c[k].annotations.leftEyeIris&&c[k].annotations.leftEyeIris[0]&&c[k].annotations.rightEyeIris&&c[k].annotations.rightEyeIris[0]&&c[k].annotations.leftEyeIris.length>0&&c[k].annotations.rightEyeIris.length>0&&c[k].annotations.leftEyeIris[0]!==null&&c[k].annotations.rightEyeIris[0]!==null?Math.max(Math.abs(c[k].annotations.leftEyeIris[3][0]-c[k].annotations.leftEyeIris[1][0]),Math.abs(c[k].annotations.rightEyeIris[4][1]-c[k].annotations.rightEyeIris[2][1]))/t.shape[2]:0,e0=((Q=e.config.face.detector)==null?void 0:Q.return)?r.squeeze(c[k].tensor):null;r.dispose(c[k].tensor),c[k].tensor&&delete c[k].tensor;let I={...c[k],id:k};(l==null?void 0:l.age)&&(I.age=l.age),(l==null?void 0:l.gender)&&(I.gender=l.gender),(l==null?void 0:l.genderScore)&&(I.genderScore=l==null?void 0:l.genderScore),(l==null?void 0:l.descriptor)&&(I.embedding=l==null?void 0:l.descriptor),(l==null?void 0:l.race)&&(I.race=l==null?void 0:l.race),a&&(I.emotion=a),x&&(I.real=x),d&&(I.live=d),Y&&Y!==0&&(I.iris=Math.trunc(500/Y/11.7)/100),X&&(I.rotation=X),e0&&(I.tensor=e0),y.push(I),e.analyze("End Face")}return e.analyze("End FaceMesh:"),e.config.async&&(e.performance.face&&delete e.performance.face,e.performance.age&&delete e.performance.age,e.performance.gender&&delete e.performance.gender,e.performance.emotion&&delete e.performance.emotion),y};var Yn=e=>{if(!e)return[];let t=[];for(let o=0;ox.part==="leftWrist"),n=e[o].keypoints.find(x=>x.part==="rightWrist"),s=e[o].keypoints.find(x=>x.part==="nose");s&&A&&n&&A.position[1]x.part==="leftShoulder"),i=e[o].keypoints.find(x=>x.part==="rightShoulder");a&&i&&Math.abs(a.positionRaw[1]-i.positionRaw[1])>.1&&t.push({body:o,gesture:`leaning ${a.position[1]>i.position[1]?"left":"right"}`})}return t},Kn=e=>{if(!e)return[];let t=[];for(let o=0;o450){let A=(e[o].mesh[33][2]||0)-(e[o].mesh[263][2]||0),n=e[o].mesh[33][0]-e[o].mesh[263][0];Math.abs(A/n)<=.15?t.push({face:o,gesture:"facing center"}):t.push({face:o,gesture:`facing ${A<0?"left":"right"}`}),Math.abs(e[o].mesh[374][1]-e[o].mesh[386][1])/Math.abs(e[o].mesh[443][1]-e[o].mesh[450][1])<.2&&t.push({face:o,gesture:"blink left eye"}),Math.abs(e[o].mesh[145][1]-e[o].mesh[159][1])/Math.abs(e[o].mesh[223][1]-e[o].mesh[230][1])<.2&&t.push({face:o,gesture:"blink right eye"});let i=Math.min(100,500*Math.abs(e[o].mesh[13][1]-e[o].mesh[14][1])/Math.abs(e[o].mesh[10][1]-e[o].mesh[152][1]));i>10&&t.push({face:o,gesture:`mouth ${Math.trunc(i)}% open`});let 
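// chin depth (mesh point 152) beyond a threshold yields the "head up" / "head down" gesture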
x=e[o].mesh[152][2]||0;Math.abs(x)>10&&t.push({face:o,gesture:`head ${x<0?"up":"down"}`})}return t},Qn=e=>{if(!e)return[];let t=[];for(let o=0;o.06||c>.06)&&(d=!1),y>c?y>.05&&t.push({iris:o,gesture:"looking right"}):c>.05&&t.push({iris:o,gesture:"looking left"});let f=Math.abs(e[o].mesh[145][1]-e[o].annotations.rightEyeIris[0][1])/e[o].box[3],h=Math.abs(e[o].mesh[374][1]-e[o].annotations.leftEyeIris[0][1])/e[o].box[3];(h<.01||f<.01||h>.022||f>.022)&&(d=!1),(h<.01||f<.01)&&t.push({iris:o,gesture:"looking down"}),(h>.022||f>.022)&&t.push({iris:o,gesture:"looking up"}),d&&t.push({iris:o,gesture:"looking center"})}return t},_n=e=>{if(!e)return[];let t=[];for(let o=0;o0){let n=A.reduce((a,i)=>(a.position[2]||0)<(i.position[2]||0)?a:i);t.push({hand:o,gesture:`${n.name} forward`});let s=A.reduce((a,i)=>a.position[1]((n-1)*j.body[T].box[F]+I)/n),k=e.body[T].boxRaw.map((I,F)=>((n-1)*j.body[T].boxRaw[F]+I)/n),X=e.body[T].keypoints.map((I,F)=>{var k0,E0,Re,we,re,so,ao,io,lo;return{score:I.score,part:I.part,position:[j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].position[0]||0)+(I.position[0]||0))/n:I.position[0],j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].position[1]||0)+(I.position[1]||0))/n:I.position[1],j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].position[2]||0)+(I.position[2]||0))/n:I.position[2]],positionRaw:[j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].positionRaw[0]||0)+(I.positionRaw[0]||0))/n:I.positionRaw[0],j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].positionRaw[1]||0)+(I.positionRaw[1]||0))/n:I.positionRaw[1],j.body[T].keypoints[F]?((n-1)*(j.body[T].keypoints[F].positionRaw[2]||0)+(I.positionRaw[2]||0))/n:I.positionRaw[2]],distance:[j.body[T].keypoints[F]?((n-1)*(((k0=j.body[T].keypoints[F].distance)==null?void 0:k0[0])||0)+(((E0=I.distance)==null?void 0:E0[0])||0))/n:(Re=I.distance)==null?void 0:Re[0],j.body[T].keypoints[F]?((n-1)*(((we=j.body[T].keypoints[F].distance)==null?void 0:we[1])||0)+(((re=I.distance)==null?void 0:re[1])||0))/n:(so=I.distance)==null?void 0:so[1],j.body[T].keypoints[F]?((n-1)*(((ao=j.body[T].keypoints[F].distance)==null?void 0:ao[2])||0)+(((io=I.distance)==null?void 0:io[2])||0))/n:(lo=I.distance)==null?void 0:lo[2]]}}),Y={},e0={connected:{}};((i=(a=t.body)==null?void 0:a.modelPath)==null?void 0:i.includes("efficientpose"))?e0=nt:((d=(x=t.body)==null?void 0:x.modelPath)==null?void 0:d.includes("blazepose"))?e0=$e:((y=(l=t.body)==null?void 0:l.modelPath)==null?void 0:y.includes("movenet"))&&(e0=Le);for(let[I,F]of Object.entries(e0.connected)){let k0=[];for(let E0=0;E0re.part===F[E0]),we=X.find(re=>re.part===F[E0+1]);Re&&we&&k0.push([Re.position,we.position])}Y[I]=k0}j.body[T]={...e.body[T],box:Q,boxRaw:k,keypoints:X,annotations:Y}}if(!j.hand||e.hand.length!==j.hand.length)j.hand=JSON.parse(JSON.stringify(e.hand));else for(let T=0;T((n-1)*j.hand[T].box[I]+e0)/n),k=e.hand[T].boxRaw.map((e0,I)=>((n-1)*j.hand[T].boxRaw[I]+e0)/n);j.hand[T].keypoints.length!==e.hand[T].keypoints.length&&(j.hand[T].keypoints=e.hand[T].keypoints);let X=e.hand[T].keypoints&&e.hand[T].keypoints.length>0?e.hand[T].keypoints.map((e0,I)=>e0.map((F,k0)=>((n-1)*(j.hand[T].keypoints[I][k0]||1)+(F||0))/n)):[],Y={};if(Object.keys(j.hand[T].annotations).length!==Object.keys(e.hand[T].annotations).length)j.hand[T].annotations=e.hand[T].annotations,Y=j.hand[T].annotations;else if(e.hand[T].annotations)for(let e0 of 
Object.keys(e.hand[T].annotations))Y[e0]=e.hand[T].annotations[e0]&&e.hand[T].annotations[e0][0]?e.hand[T].annotations[e0].map((I,F)=>I.map((k0,E0)=>((n-1)*j.hand[T].annotations[e0][F][E0]+k0)/n)):null;j.hand[T]={...e.hand[T],box:Q,boxRaw:k,keypoints:X,annotations:Y}}if(!j.face||e.face.length!==j.face.length)j.face=JSON.parse(JSON.stringify(e.face));else for(let T=0;T((n-1)*j.face[T].box[Y]+X)/n),k=e.face[T].boxRaw.map((X,Y)=>((n-1)*j.face[T].boxRaw[Y]+X)/n);if(e.face[T].rotation){let X={matrix:[0,0,0,0,0,0,0,0,0],angle:{roll:0,yaw:0,pitch:0},gaze:{bearing:0,strength:0}};X.matrix=(c=e.face[T].rotation)==null?void 0:c.matrix,X.angle={roll:((n-1)*(((h=(f=j.face[T].rotation)==null?void 0:f.angle)==null?void 0:h.roll)||0)+(((v=(m=e.face[T].rotation)==null?void 0:m.angle)==null?void 0:v.roll)||0))/n,yaw:((n-1)*(((P=(b=j.face[T].rotation)==null?void 0:b.angle)==null?void 0:P.yaw)||0)+(((u=(p=e.face[T].rotation)==null?void 0:p.angle)==null?void 0:u.yaw)||0))/n,pitch:((n-1)*(((W=(E=j.face[T].rotation)==null?void 0:E.angle)==null?void 0:W.pitch)||0)+(((S=(C=e.face[T].rotation)==null?void 0:C.angle)==null?void 0:S.pitch)||0))/n},X.gaze={bearing:((n-1)*(((V=(O=j.face[T].rotation)==null?void 0:O.gaze)==null?void 0:V.bearing)||0)+(((H=(B=e.face[T].rotation)==null?void 0:B.gaze)==null?void 0:H.bearing)||0))/n,strength:((n-1)*(((z=(t0=j.face[T].rotation)==null?void 0:t0.gaze)==null?void 0:z.strength)||0)+(((G0=(m0=e.face[T].rotation)==null?void 0:m0.gaze)==null?void 0:G0.strength)||0))/n},j.face[T]={...e.face[T],rotation:X,box:Q,boxRaw:k}}j.face[T]={...e.face[T],box:Q,boxRaw:k}}if(!j.object||e.object.length!==j.object.length)j.object=JSON.parse(JSON.stringify(e.object));else for(let T=0;T((n-1)*j.object[T].box[Y]+X)/n),k=e.object[T].boxRaw.map((X,Y)=>((n-1)*j.object[T].boxRaw[Y]+X)/n);j.object[T]={...e.object[T],box:Q,boxRaw:k}}if(e.persons){let T=e.persons;if(!j.persons||T.length!==j.persons.length)j.persons=JSON.parse(JSON.stringify(T));else for(let Q=0;Q((n-1)*j.persons[Q].box[X]+k)/n)}e.gesture&&(j.gesture=e.gesture);let s=M();return ro=R.perfadd?ro+Math.round(s-o):Math.round(s-o),e.performance&&(j.performance={...e.performance,interpolate:ro}),j}function vt(e,t,o={order:2,multiplier:25}){let A=0;for(let n=0;n{if(e===0)return 1;let n=t===2?Math.sqrt(e):e**(1/t),s=(1-n/100-o)/(A-o);return Math.max(Math.min(s,1),0)};function tr(e,t,o={order:2,multiplier:25,min:.2,max:.8}){let A=vt(e,t,o);return er(A,o.order||2,o.min||0,o.max||1)}function or(e,t,o={order:2,multiplier:25,threshold:0,min:.2,max:.8}){if(!Array.isArray(e)||!Array.isArray(t)||e.length<64||t.length===0||e.length!==t[0].length)return{index:-1,distance:Number.POSITIVE_INFINITY,similarity:0};let A=Number.MAX_SAFE_INTEGER,n=-1;for(let a=0;az.box[0]&&C.box[0]z.box[1]&&C.box[1]+C.box[3]S.body.box[0]&&z.box[0]+z.box[2]S.body.box[1]&&z.box[1]+z.box[3]S.body.box[0]&&z.box[1]+z.box[3]>S.body.box[1]&&z.box[1]+z.box[3]{z&&z.length===4&&(O.push(z[0],z[0]+z[2]),V.push(z[1],z[1]+z[3]))};B((b=S.face)==null?void 0:b.box),B((P=S.body)==null?void 0:P.box),B((u=(p=S.hands)==null?void 0:p.left)==null?void 0:u.box),B((W=(E=S.hands)==null?void 0:E.right)==null?void 0:W.box);let H=Math.min(...O),t0=Math.min(...V);S.box=[H,t0,Math.max(...O)-H,Math.max(...V)-t0],n&&n[1]&&n[2]&&(S.boxRaw=[S.box[0]/n[2],S.box[1]/n[1],S.box[2]/n[2],S.box[3]/n[1]]),a.push(S)}return a}var Tt=` +/9j/4AAQSkZJRgABAQEAYABgAAD/4QBoRXhpZgAATU0AKgAAAAgABAEaAAUAAAABAAAAPgEbAAUA +AAABAAAARgEoAAMAAAABAAIAAAExAAIAAAARAAAATgAAAAAAAABgAAAAAQAAAGAAAAABcGFpbnQu 
+[... base64-encoded JPEG data elided: the bulk of the embedded warmup-image strings Tt (face sample) and Rt (body/full sample) used by the warmup functions; the Rt template literal terminates below ...]
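The minified helpers `vt`, `er`, `tr` and `or` defined just before the `Tt` literal above are bound to the Human instance further below as `distance`, `similarity` and `match`. This extraction dropped the text after every `<` comparison, so their loop bodies read incompletely; the following is a readable TypeScript sketch, not the shipped source. The `er` normalization is transcribed from the visible code; the distance summation is an assumption consistent with its `order`/`multiplier` options; the function names here are illustrative only.

```ts
// Minkowski-style distance between two descriptors (minified `vt`).
// The summation body is an assumption; it is garbled in the text above.
function descriptorDistance(d1: number[], d2: number[], options = { order: 2, multiplier: 25 }): number {
  let sum = 0;
  for (let i = 0; i < d1.length; i++) {
    const diff = options.order === 2 ? d1[i] - d2[i] : Math.abs(d1[i] - d2[i]);
    sum += options.order === 2 ? diff * diff : diff ** options.order;
  }
  return options.multiplier * sum;
}

// Normalization (minified `er`, visible verbatim above): take the order-th
// root, rescale between min and max, clamp the result to [0, 1].
function distanceToSimilarity(dist: number, order = 2, min = 0, max = 1): number {
  if (dist === 0) return 1;
  const root = order === 2 ? Math.sqrt(dist) : dist ** (1 / order);
  return Math.max(Math.min((1 - root / 100 - min) / (max - min), 1), 0);
}

// Match (minified `or`): scan a descriptor database for the smallest distance.
// Descriptors shorter than 64 elements or with mismatched lengths yield index -1,
// as in the visible guard clause above.
function matchDescriptor(descriptor: number[], db: number[][], options = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }) {
  if (descriptor.length < 64 || db.length === 0 || descriptor.length !== db[0].length) {
    return { index: -1, distance: Number.POSITIVE_INFINITY, similarity: 0 };
  }
  let best = Number.MAX_SAFE_INTEGER;
  let index = -1;
  for (let i = 0; i < db.length; i++) {
    const dist = descriptorDistance(descriptor, db[i], options);
    if (dist < best) { best = dist; index = i; }
  }
  return { index, distance: best, similarity: distanceToSimilarity(best, options.order, options.min, options.max) };
}
```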
+lAJAAAAAAAAAAAAAASAAAAAAAAAAAAAJAAAABAJABAlAAAAAAAAAAAAAAAAAAAAAAAAIAAAAAAAA +AAABAJQAAAAgAABAAI2EoBGyJhkgGPKxmqxAKpownHC+YRMdN5BrTj67R3bOn01o7p01Iv71u89o +b9a7LfBTfS1vWI2jf12VfQPSW8KX2mas+NC2iv6xMNfJpMnLtEbuuxtMRCtzF55NR5rPps1N/ctP +y6uHreE6nXZ4pak48X3rT06fB7fNeI33cbX6mI32R/MWu7XF116aDSRhxbRERs8f499bkyZeeKae +kzE2mdon81/tfxDLGOunwbzlzbx08oaHBvZHJlx48mrvaa94pu04y617576rNGLRRM0397JEd/lu +9Dw/S3x4qxffo6mm4NjwUiKY4iI9Ib1dHFY6QIaNabbrYrLfrpJtaK1rMzPZb/s+05IpP59OyLeJ +k7eNfRaOc1ue32I7fGXYpi5Y77M8OGMeOKxHSFsU3Y29deZMzirl6dlVvhLatCjJHeYQv1rXnps1 +8k9/VsW6qLVmZIi1rzitlvFKRvaZ2h6TSaenC9FFY+3brM+sqeG8Prp4+kZ+lvuxPkr1mqm95nfp +DXM459676a2q1dsV7XietvNno78+CJn1cjX6mOeIm0bR33dfRU5NJjidt9t5afjG/V6JZ7I2QMNh +nyo2BhsMuVG3wAhMSbbQRAMolnE+iuGUSCyJZRKuGUSCyJZK4llEgyZMYTuCUsYSCQASISAAAlCQ +AAAAAAEoASCASAAAAAAAAAAAAlACRACQAAAAAAAAAEgCEoASCAAAAAAAAAAAAAAAAAAAAAAABAAA +AAAAAAAISAIAAAAAAQAAACASgAAAQJAQAAhIDHZhln3do7z0WS18mWsajHjmes7pg3dNi5aRMNqO +yvDHTpPRaigHZhN4hHRlaVN59JY3zRENLUavaO+yq0iNVlitJ6vNcR1MVi0zO0era1/Ea0rPvbz5 +PM5MWp45qvo2GZrhmfrsnpHpHzTCseEcM/2vrr8Q1Eb4qzy44nziPN63HpYiIiI7LNHoqabBTFii +IpSNohuVxrKtWMEejPwY9G1FFmHB4mWJn7MdfnIM9JpIx15to5pbUaas/a6rqViI7MxPxqX0UT1r +O3wVzpbR2hviP5i03Y5s6a879FNtHljydhExCv8AMTPJXBnRZbz0iG5ptFjwe/l96zctMVamTJtE +yTMibu1VrdTzRMR0j0ed4lr64MVpm0RERvMz5NvX62uOJ69XhOKX1HH9bHDtFvNYnfJeOy0Z2ojX +6jjnEq6fRUmccTvN/J9H0eKcOnx45neaxEbubwHgOHg+milI3vP2resu3Wu0JQmITsmISDHZHKz2 +JgFc1RMLJhGwK9iIZ7MZgEdgmAEwyiWCdwWRLKJVxKYsC2JTuriWUSDNlEsIlMAySx3SCRCQSIAS +AAACRACQAAAAAAASIASAAAAAAAAAAAAAAACRACRACQASIAAAAAAAAAAAAAAAAAAAAAAAAQCUAAAA +AAAAAAIAAAAAAAAQAAAAAACBICBICAAEJAQJQCJcLjuS2ny6fPG/LWdpd1o8T0X07SXx/e7wCdJx +Wa0jmneHQpxPDMdZmJfNtZm49weZrh0/j4o7VtSZ2+Uw0/8A7o49k92vBLc/ntFohFW9PqGXimOI +6Tu1L8T3eCx6r2t1O3JwvHjifO99v7t/Bwf2l1PXU6rS6eJ8qUm8x+so5TsekzcSjbvs4mt4rzW5 +K2mbT0itesy2cHsvbvqtbmyz5xERWP2jd1tJwrTaONsOKtZ8585+cnDrzmn4Rq+IZObUROHD32n7 +Vv8A0ej0uhxaXFGPFSK1j0bkY4jyZRVZVXFGUVWbGwKsk8mObekNrSW3pWf1a2aYjHbm7bNnQ1id +PW0TvuDdhJEbQABMsLW2R0ZTMQrvfbz2YWzVhpanUxEd0dWkW5c8R5uXxDX1w4pnfr5Q19XxKuOJ +2neXltVqtVxbV/RdJ715+1bypANfiOu1HENV9C0MTfNeesx2rD1PAeBYuE6aKx72W3W9/WVnBuB4 +eF4dqRzZbdb5J72l160WVK02ZxCYhOwI23TsnY2BGxsnYBjsiYZsZBjMMZZSgGEolMsQDdG6NwZ7 +piVe6YkFsSziVMWZRILolMSriWUSCyJTuwhMSDMRCQSI3SAlACRCQAAEoAEoASAAAAAAAAACUACR +ACQAAAAAAAAAAAAASAAAAAAAAAAAAAAAAAAACAAAAAAAAAAAAAABAAAAAAAAAAAAACBKAAAAAAAQ +JQAAAhICEbJAYTWJ7wx8KvpC0BV4ceieWGewDHlNmWwCNjZICNhIDmcZredBecdpiY69FXCOLW+i +UiZidukulmxxlx2paN4mNng+K4+I8Hy2yaTfl37TXetoCPfRxfp1qi3F48ofKMvtvxak8s6LDv61 +rZji9rPaLUf5PC+bfttS0q8q3p9W/wBrRMdpUZuKdN99nzvFqPbTVz7nD8OKs+do2/mW3h4D7Xaq +ZnPrtNpqz35aRaYOHY9Zk4pNt9rR+rl6zi+OnS+WN57Rv1lXp/YrNaYtruL6zNPnGO3hxP6O5w/2 +f0HDuun09Yv55Le9afznqcOvO4tBreMTHu30unnva0bWt8on+70nDuE4OHYYx4Kbesz3tPrMuhGO +IjpDOKrK9YVpsyiGUQnYGOyUgI2SlAIEmwMWMs9kTAMJYzDOYRMArmGErZhhMArlHmzmGMwDE3Ts +bAbs4swj5pgFkSziVcM4BZEsolXDKAZwyhjCYBkACQhIAAAAAAAJAAAAAAAAAAAAAAAAAAAShIAA +AAAAAAJAAAAAAAAAAAAAABAJEAAAAAAAAAAAAAAAIEoBKAAAAAAAAAAAAAAABAlAAAAAAAIAAAAA +BAkBAkBAkBAlACEgMZjdjbFW8bWrEx8YWANb6Fp+bfwab+vLDKMFK9qxH5L0bAr8OPRPKz2AY7J2 +SbAjYZAI2E7AIEgIEgIEgMdkSy2NgY7MdlmyNoBXsxmFuyNgVTVjNV3KjlBRNTlXTVHKCrlIqt5T +lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/ +2Q==`;async function X2(e){let t=(n,s="application/octet-stream")=>fetch(`data:${s};base64,${n}`).then(a=>a.blob()),o,A;switch(e.config.warmup){case"face":o=await t(Tt);break;case"body":case"full":o=await t(Rt);break;default:o=null}if(o){let n=await createImageBitmap(o);A=await e.detect(n,e.config),n.close()}return A}async function D2(e){return new 
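/* warmup image paths: X2 above fetches the embedded base64 sample as a blob and decodes it
   with createImageBitmap; D2 (continuing below) falls back to an Image element drawn onto a
   2d canvas; q2 further down decodes the JPEG via tfjs-node when running under nodejs */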
Promise(t=>{let o;switch(e.config.warmup){case"face":o="data:image/jpeg;base64,"+Tt;break;case"full":case"body":o="data:image/jpeg;base64,"+Rt;break;default:o=null}let A;if(typeof Image!="undefined")A=new Image;else if(R.Image)A=new R.Image;else return;A.onload=async()=>{let n=s0(A.naturalWidth,A.naturalHeight);if(!n)g("Warmup: Canvas not found"),t(void 0);else{let s=n.getContext("2d");s&&s.drawImage(A,0,0);let a=await e.image(n),i=await e.detect(a.tensor,e.config);t(i)}},o?A.src=o:t(void 0)})}async function q2(e){let t=n=>Buffer.from(n,"base64"),o;e.config.warmup==="face"?o=t(Tt):o=t(Rt);let A;if("node"in r){let n=r.node.decodeJpeg(o),s=n.expandDims(0);e.tf.dispose(n),A=await e.detect(s,e.config),e.tf.dispose(s)}else e.config.debug&&g("Warmup tfjs-node not loaded");return A}async function nr(e,t){let o=M();if(e.state="warmup",t&&(e.config=o0(e.config,t)),!e.config.warmup||e.config.warmup.length===0||e.config.warmup==="none")return{face:[],body:[],hand:[],gesture:[],object:[],performance:e.performance,timestamp:M(),persons:[],error:null};let A;return new Promise(async n=>{typeof createImageBitmap=="function"?A=await X2(e):typeof Image!="undefined"||R.Canvas!==void 0?A=await D2(e):A=await q2(e);let s=M();e.config.debug&&g("Warmup",e.config.warmup,Math.round(s-o),"ms"),e.emit("warmup"),n(A)})}var Te,Fe,Ve,wt,J2=class{constructor(t){w(this,"version");w(this,"config");w(this,"result");w(this,"state");w(this,"process");w(this,"tf");w(this,"env");w(this,"draw");w(this,"models");w(this,"events");w(this,"faceTriangulation");w(this,"faceUVMap");w(this,"performance");Ee(this,Te,void 0);Ee(this,Fe,void 0);Ee(this,Ve,void 0);w(this,"gl");w(this,"analyze",(...t)=>{if(!ke(this,Fe))return;let o=this.tf.engine().state.numTensors,A=ke(this,Te);ze(this,Te,o);let n=o-A;n!==0&&g(...t,n)});Ee(this,wt,t=>{if(!ke(this,Ve))return null;if(!t)return"input is not defined";if(this.env.node&&!(t instanceof se))return"input must be a tensor";try{this.tf.getBackend()}catch(o){return"backend not loaded"}return null});w(this,"similarity",tr);w(this,"distance",vt);w(this,"match",or);w(this,"emit",t=>{var o;this.events&&this.events.dispatchEvent&&((o=this.events)==null||o.dispatchEvent(new Event(t)))});this.env=R,Y0.wasmPath=je["tfjs-core"].includes("-")?"https://vladmandic.github.io/tfjs/dist/":`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${r.version_core}/dist/`,Y0.modelBasePath=R.browser?"../models/":"file://models/",Y0.backend=R.browser?"humangl":"tensorflow",this.version=Wt,Object.defineProperty(this,"version",{value:Wt}),this.config=JSON.parse(JSON.stringify(Y0)),Object.seal(this.config),t&&(this.config=o0(this.config,t)),this.config.cacheModels=typeof indexedDB!="undefined",To(this.config),this.tf=r,this.state="idle",ze(this,Te,0),ze(this,Fe,!1),ze(this,Ve,!1),this.performance={},this.events=typeof EventTarget!="undefined"?new EventTarget:void 0,this.models=new J5,this.draw={options:B0,canvas:(o,A)=>Dn(o,A),face:(o,A,n)=>$5(o,A,n),body:(o,A,n)=>eo(o,A,n),hand:(o,A,n)=>to(o,A,n),gesture:(o,A,n)=>_5(o,A,n),object:(o,A,n)=>oo(o,A,n),person:(o,A,n)=>Xn(o,A,n),all:(o,A,n)=>qn(o,A,n)},this.result={face:[],body:[],hand:[],gesture:[],object:[],performance:{},timestamp:0,persons:[],error:null},this.process={tensor:null,canvas:null},this.faceTriangulation=IA,this.faceUVMap=OA,this.gl=q,this.emit("create")}reset(){let t=this.config.backend;this.config=JSON.parse(JSON.stringify(Y0)),this.config.backend=t}validate(t){return Et(Y0,t||this.config)}now(){return M()}image(t,o=!0){return ie(t,this.config,o)}async 
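/* remaining Human instance methods follow: segmentation, enhance, compare, init, load,
   next (result interpolation), warmup and profile, then the main detect pipeline */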
segmentation(t,o){return On(t,o,this.config)}enhance(t){return p5(t)}compare(t,o){return Po(this.config,t,o)}async init(){await Pt(this,!0),await this.tf.ready()}async load(t){this.state="load";let o=M(),A=Object.values(this.models).filter(a=>a).length;t&&(this.config=o0(this.config,t)),this.env.initial&&(this.config.debug&&g(`version: ${this.version}`),this.config.debug&&g(`tfjs version: ${this.tf.version["tfjs-core"]}`),await Pt(this)||g("error: backend check failed"),await r.ready(),this.env.browser&&(this.config.debug&&g("configuration:",this.config),this.config.debug&&g("environment:",this.env),this.config.debug&&g("tf flags:",this.tf.ENV.flags))),await Ln(this),this.env.initial&&this.config.debug&&g("tf engine state:",this.tf.engine().state.numBytes,"bytes",this.tf.engine().state.numTensors,"tensors"),this.env.initial=!1,Object.values(this.models).filter(a=>a).length!==A&&(await Bn(this),this.emit("load"));let s=Math.trunc(M()-o);s>(this.performance.loadModels||0)&&(this.performance.loadModels=this.env.perfadd?(this.performance.loadModels||0)+s:s)}next(t=this.result){return $n(t,this.config)}async warmup(t){let o=M(),A=await nr(this,t),n=M();return this.performance.warmup=Math.trunc(n-o),A}async profile(t,o){let A=await this.tf.profile(()=>this.detect(t,o)),n={};for(let i of A.kernels)n[i.name]?n[i.name]+=i.kernelTimeMs:n[i.name]=i.kernelTimeMs;let s=[];Object.entries(n).forEach(i=>s.push({name:i[0],ms:i[1]})),s.sort((i,x)=>x.ms-i.ms),s.length=20;let a={};for(let i of s)a[i.name]=i.ms;return a}async detect(t,o){return this.state="detect",new Promise(async A=>{var v,b,P,p,u,E,W,C,S,O,V,B,H,t0,z,m0,G0,T,Q,k,X,Y;this.state="config";let n;this.config=o0(this.config,o),this.state="check";let s=ke(this,wt).call(this,t);s&&(g(s,t),this.emit("error"),A({face:[],body:[],hand:[],gesture:[],object:[],performance:this.performance,timestamp:M(),persons:[],error:s}));let a=M();await Pt(this),await this.load(),n=M(),this.state="image";let i=await ie(t,this.config);if(this.process=i,this.performance.inputProcess=this.env.perfadd?(this.performance.inputProcess||0)+Math.trunc(M()-n):Math.trunc(M()-n),this.analyze("Get Image:"),!i.tensor){this.config.debug&&g("could not convert input to tensor"),this.emit("error"),A({face:[],body:[],hand:[],gesture:[],object:[],performance:this.performance,timestamp:M(),persons:[],error:"could not convert input to tensor"});return}this.emit("image"),n=M(),this.config.skipAllowed=await Mo(this.config,i.tensor),this.performance.totalFrames||(this.performance.totalFrames=0),this.performance.cachedFrames||(this.performance.cachedFrames=0),this.performance.totalFrames++,this.config.skipAllowed&&this.performance.cachedFrames++,this.performance.cacheCheck=this.env.perfadd?(this.performance.cacheCheck||0)+Math.trunc(M()-n):Math.trunc(M()-n),this.analyze("Check Changed:");let x=[],d=[],l=[],y=[];this.state="detect:face",this.config.async?(x=this.config.face.enabled?no(this,i.tensor):[],this.performance.face&&delete this.performance.face):(n=M(),x=this.config.face.enabled?await no(this,i.tensor):[],this.performance.face=this.env.perfadd?(this.performance.face||0)+Math.trunc(M()-n):Math.trunc(M()-n)),this.config.async&&(this.config.body.maxDetected===-1||this.config.hand.maxDetected===-1)&&(x=await x),this.analyze("Start Body:"),this.state="detect:body";let c=this.config.body.maxDetected===-1?o0(this.config,{body:{maxDetected:this.config.face.enabled?1*x.length:1}}):this.config;this.config.async?(((v=this.config.body.modelPath)==null?void 
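/* detect pipeline, body stage: the body model is dispatched on the configured modelPath
   (posenet | blazepose | efficientpose | movenet), followed by the hand and object stages */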
0:v.includes("posenet"))?d=this.config.body.enabled?D5(i.tensor,c):[]:((b=this.config.body.modelPath)==null?void 0:b.includes("blazepose"))?d=this.config.body.enabled?e5(i.tensor,c):[]:((P=this.config.body.modelPath)==null?void 0:P.includes("efficientpose"))?d=this.config.body.enabled?a5(i.tensor,c):[]:((p=this.config.body.modelPath)==null?void 0:p.includes("movenet"))&&(d=this.config.body.enabled?L5(i.tensor,c):[]),this.performance.body&&delete this.performance.body):(n=M(),((u=this.config.body.modelPath)==null?void 0:u.includes("posenet"))?d=this.config.body.enabled?await D5(i.tensor,c):[]:((E=this.config.body.modelPath)==null?void 0:E.includes("blazepose"))?d=this.config.body.enabled?await e5(i.tensor,c):[]:((W=this.config.body.modelPath)==null?void 0:W.includes("efficientpose"))?d=this.config.body.enabled?await a5(i.tensor,c):[]:((C=this.config.body.modelPath)==null?void 0:C.includes("movenet"))&&(d=this.config.body.enabled?await L5(i.tensor,c):[]),this.performance.body=this.env.perfadd?(this.performance.body||0)+Math.trunc(M()-n):Math.trunc(M()-n)),this.analyze("End Body:"),this.analyze("Start Hand:"),this.state="detect:hand";let f=this.config.hand.maxDetected===-1?o0(this.config,{hand:{maxDetected:this.config.face.enabled?2*x.length:1}}):this.config;this.config.async?(((O=(S=this.config.hand.detector)==null?void 0:S.modelPath)==null?void 0:O.includes("handdetect"))?l=this.config.hand.enabled?R5(i.tensor,f):[]:((B=(V=this.config.hand.detector)==null?void 0:V.modelPath)==null?void 0:B.includes("handtrack"))&&(l=this.config.hand.enabled?z5(i.tensor,f):[]),this.performance.hand&&delete this.performance.hand):(n=M(),((t0=(H=this.config.hand.detector)==null?void 0:H.modelPath)==null?void 0:t0.includes("handdetect"))?l=this.config.hand.enabled?await R5(i.tensor,f):[]:((m0=(z=this.config.hand.detector)==null?void 0:z.modelPath)==null?void 0:m0.includes("handtrack"))&&(l=this.config.hand.enabled?await z5(i.tensor,f):[]),this.performance.hand=this.env.perfadd?(this.performance.hand||0)+Math.trunc(M()-n):Math.trunc(M()-n)),this.analyze("End Hand:"),this.analyze("Start Object:"),this.state="detect:object",this.config.async?(((G0=this.config.object.modelPath)==null?void 0:G0.includes("nanodet"))?y=this.config.object.enabled?G5(i.tensor,this.config):[]:((T=this.config.object.modelPath)==null?void 0:T.includes("centernet"))&&(y=this.config.object.enabled?A5(i.tensor,this.config):[]),this.performance.object&&delete this.performance.object):(n=M(),((Q=this.config.object.modelPath)==null?void 0:Q.includes("nanodet"))?y=this.config.object.enabled?await G5(i.tensor,this.config):[]:((k=this.config.object.modelPath)==null?void 0:k.includes("centernet"))&&(y=this.config.object.enabled?await A5(i.tensor,this.config):[]),this.performance.object=this.env.perfadd?(this.performance.object||0)+Math.trunc(M()-n):Math.trunc(M()-n)),this.analyze("End Object:"),this.state="detect:await",this.config.async&&([x,d,l,y]=await Promise.all([x,d,l,y])),this.state="detect:gesture";let h=[];this.config.gesture.enabled&&(n=M(),h=[...Kn(x),...Yn(d),..._n(l),...Qn(x)],this.config.async?this.performance.gesture&&delete this.performance.gesture:this.performance.gesture=this.env.perfadd?(this.performance.gesture||0)+Math.trunc(M()-n):Math.trunc(M()-n)),this.performance.total=this.env.perfadd?(this.performance.total||0)+Math.trunc(M()-a):Math.trunc(M()-a);let m=((Y=(X=this.process)==null?void 0:X.tensor)==null?void 
0:Y.shape)||[];this.result={face:x,body:d,hand:l,gesture:h,object:y,performance:this.performance,canvas:this.process.canvas,timestamp:Date.now(),error:null,get persons(){return Ar(x,d,l,h,m)}},r.dispose(i.tensor),this.emit("detect"),this.state="idle",A(this.result)})}};Te=new WeakMap,Fe=new WeakMap,Ve=new WeakMap,wt=new WeakMap;export{J2 as Human,J2 as default,Y0 as defaults,R as env}; +/** + * Human main module + * @default Human Library + * @summary + * @author + * @copyright + * @license MIT + */ +//# sourceMappingURL=human.esm-nobundle.js.map diff --git a/dist/human.esm.d.ts b/dist/human.esm.d.ts new file mode 100644 index 00000000..fca0ad10 --- /dev/null +++ b/dist/human.esm.d.ts @@ -0,0 +1,2520 @@ +/// + +/** meta-function that performs draw for: canvas, face, body, hand */ +declare function all(inCanvas: AnyCanvas, result: Result, drawOptions?: Partial<DrawOptions>): Promise<[void, void, void, void, void] | null>; + +/** Defines all possible canvas types */ +export declare type AnyCanvas = HTMLCanvasElement | OffscreenCanvas; + +/** Defines all possible image types */ +export declare type AnyImage = HTMLImageElement | typeof Image; + +/** Defines all possible video types */ +export declare type AnyVideo = HTMLMediaElement | HTMLVideoElement; + +/** @docalias number[] */ +declare interface ArrayMap { + R0: number; + R1: number[]; + R2: number[][]; + R3: number[][][]; + R4: number[][][][]; + R5: number[][][][][]; + R6: number[][][][][][]; +} + +/** Possible TensorFlow backends */ +export declare type BackendType = ['cpu', 'wasm', 'webgl', 'humangl', 'tensorflow', 'webgpu']; + +/** draw detected bodies */ +declare function body(inCanvas: AnyCanvas, result: Array<BodyResult>, drawOptions?: Partial<DrawOptions>): Promise<void>; + +export declare type BodyAnnotation = BodyAnnotationBlazePose | BodyAnnotationEfficientPose; + +export declare type BodyAnnotationBlazePose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'leftEye' | 'rightEye' | 'mouth'; + +export declare type BodyAnnotationEfficientPose = 'leftLeg' | 'rightLeg' | 'torso' | 'leftArm' | 'rightArm' | 'head'; + +/** Configures all body detection specific options */ +export declare interface BodyConfig extends GenericConfig { + /** maximum number of detected bodies */ + maxDetected: number; + /** minimum confidence for a detected body before results are discarded */ + minConfidence: number; +} + +/** body gesture type */ +export declare type BodyGesture = `leaning ${'left' | 'right'}` | `raise ${'left' | 'right'} hand` | 'i give up'; + +/** Body Result keypoints */ +export declare interface BodyKeypoint { + /** body part name */ + part: BodyLandmark; + /** body part position */ + position: Point; + /** body part position normalized to 0..1 */ + positionRaw: Point; + /** body part position relative to body center in meters */ + distance?: Point; + /** body part detection score */ + score: number; +} + +export declare type BodyLandmark = BodyLandmarkPoseNet | BodyLandmarkMoveNet | BodyLandmarkEfficientNet | BodyLandmarkBlazePose; + +export declare type BodyLandmarkBlazePose = 'nose' | 'leftEyeInside' | 'leftEye' | 'leftEyeOutside' | 'rightEyeInside' | 'rightEye' | 'rightEyeOutside' | 'leftEar' | 'rightEar' | 'leftMouth' | 'rightMouth' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftPinky' | 'rightPinky' | 'leftIndex' | 'rightIndex' | 'leftThumb' | 'rightThumb' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle' | 'leftHeel' | 'rightHeel' | 'leftFoot' | 'rightFoot' | 'bodyCenter' | 
'bodyTop' | 'leftPalm' | 'leftHand' | 'rightPalm' | 'rightHand'; + +export declare type BodyLandmarkEfficientNet = 'head' | 'neck' | 'rightShoulder' | 'rightElbow' | 'rightWrist' | 'chest' | 'leftShoulder' | 'leftElbow' | 'leftWrist' | 'bodyCenter' | 'rightHip' | 'rightKnee' | 'rightAnkle' | 'leftHip' | 'leftKnee' | 'leftAnkle'; + +export declare type BodyLandmarkMoveNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle'; + +export declare type BodyLandmarkPoseNet = 'nose' | 'leftEye' | 'rightEye' | 'leftEar' | 'rightEar' | 'leftShoulder' | 'rightShoulder' | 'leftElbow' | 'rightElbow' | 'leftWrist' | 'rightWrist' | 'leftHip' | 'rightHip' | 'leftKnee' | 'rightKnee' | 'leftAnkle' | 'rightAnkle'; + +/** Body results */ +export declare interface BodyResult { + /** body id */ + id: number; + /** body detection score */ + score: number; + /** detected body box */ + box: Box; + /** detected body box normalized to 0..1 */ + boxRaw: Box; + /** detected body keypoints */ + keypoints: Array<BodyKeypoint>; + /** detected body keypoints combined into annotated parts */ + annotations: Record<BodyAnnotation, Point[][]>; +} + +/** generic box as [x, y, width, height] */ +export declare type Box = [number, number, number, number]; + +/** + * Creates an IOHandler that loads model artifacts from user-selected files. + * + * This method can be used for loading from files such as user-selected files + * in the browser. + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * // Note: This code snippet won't run properly without the actual file input + * // elements in the HTML DOM. + * + * // Suppose there are two HTML file input (`<input type="file">`) + * // elements. + * const uploadJSONInput = document.getElementById('upload-json'); + * const uploadWeightsInput = document.getElementById('upload-weights'); + * const model = await tf.loadLayersModel(tf.io.browserFiles( + * [uploadJSONInput.files[0], uploadWeightsInput.files[0]])); + * ``` + * + * @param files `File`s to load from. Currently, this function supports only + * loading from files that contain Keras-style models (i.e., `tf.Model`s), for + * which an `Array` of `File`s is expected (in that order): + * - A JSON file containing the model topology and weight manifest. + * - Optionally, one or more binary files containing the binary weights. + * These files must have names that match the paths in the `weightsManifest` + * contained by the aforementioned JSON file, or errors will be thrown + * during loading. These weights files have the same format as the ones + * generated by `tensorflowjs_converter` that comes with the `tensorflowjs` + * Python PIP package. If no weights files are provided, only the model + * topology will be loaded from the JSON file above. + * @returns An instance of `Files` `IOHandler`. + * + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function browserFiles(files: File[]): IOHandler; + +/** + * Deprecated. Use `tf.io.http`. 
+ * @param path + * @param loadOptions + */ +declare function browserHTTPRequest(path: string, loadOptions?: LoadOptions): IOHandler; + +/** draw processed canvas */ +declare function canvas(input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas): Promise<void>; + +/** + * Concatenate a number of ArrayBuffers into one. + * + * @param buffers A number of array buffers to concatenate. + * @returns Result of concatenating `buffers` in order. + */ +declare function concatenateArrayBuffers(buffers: ArrayBuffer[]): ArrayBuffer; + +/** + * Configuration interface definition for **Human** library + * Contains all configurable parameters + * Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) + */ +export declare interface Config { + /** Backend used for TFJS operations + * valid built-in backends are: + * - Browser: `cpu`, `wasm`, `webgl`, `humangl`, `webgpu` + * - NodeJS: `cpu`, `wasm`, `tensorflow` + * default: `humangl` for browser and `tensorflow` for nodejs + */ + backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu'; + /** Path to *.wasm files if backend is set to `wasm` + * + * default: auto-detects to link to CDN `jsdelivr` when running in browser + */ + wasmPath: string; + /** Print debug statements to console + * + * default: `true` + */ + debug: boolean; + /** Perform model loading and inference concurrently or sequentially + * + * default: `true` + */ + async: boolean; + /** What to use for `human.warmup()` + * - warmup pre-initializes all models for faster inference but can take significant time on startup + * - used by `webgl`, `humangl` and `webgpu` backends + * + * default: `full` + */ + warmup: '' | 'none' | 'face' | 'full' | 'body'; + /** Base model path (typically starting with file://, http:// or https://) for all models + * - individual modelPath values are relative to this path + * + * default: `../models/` for browsers and `file://models/` for nodejs + */ + modelBasePath: string; + /** Cache models in IndexedDB on first successful load + * default: true if indexeddb is available (browsers), false if it's not (nodejs) + */ + cacheModels: boolean; + /** Cache sensitivity + * - values 0..1 where 0.01 means reset cache if input changed more than 1% + * - set to 0 to disable caching + * + * default: 0.7 + */ + cacheSensitivity: number; + /** Perform immediate garbage collection on deallocated tensors instead of caching them */ + deallocate: boolean; + /** Internal Variable */ + skipAllowed: boolean; + /** Filter config {@link FilterConfig} */ + filter: Partial<FilterConfig>; + /** Gesture config {@link GestureConfig} */ + gesture: Partial<GestureConfig>; + /** Face config {@link FaceConfig} */ + face: Partial<FaceConfig>; + /** Body config {@link BodyConfig} */ + body: Partial<BodyConfig>; + /** Hand config {@link HandConfig} */ + hand: Partial<HandConfig>; + /** Object config {@link ObjectConfig} */ + object: Partial<ObjectConfig>; + /** Segmentation config {@link SegmentationConfig} */ + segmentation: Partial<SegmentationConfig>; +} + +/** + * Copy a model from one URL to another. + * + * This function supports: + * + * 1. Copying within a storage medium, e.g., + * `tf.io.copyModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Copying between two storage mediums, e.g., + * `tf.io.copyModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. 
+ * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Copy the model, from Local Storage to IndexedDB. + * await tf.io.copyModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove both models. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of copying. + * @param destURL Destination URL of copying. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if copying fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. + * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function copyModel(sourceURL: string, destURL: string): Promise<ModelArtifactsInfo>; + +/** + * We wrap data id since we use weak map to avoid memory leaks. + * Since we have our own memory management, we have a reference counter + * mapping a tensor to its data, so there is always a pointer (even if that + * data is otherwise garbage collectable). + * See https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/ + * Global_Objects/WeakMap + */ +declare type DataId = object; + +declare type DataToGPUOptions = DataToGPUWebGLOption; + +declare interface DataToGPUWebGLOption { + customTexShape?: [number, number]; +} + +/** @docalias 'float32'|'int32'|'bool'|'complex64'|'string' */ +declare type DataType = keyof DataTypeMap; + +declare interface DataTypeMap { + float32: Float32Array; + int32: Int32Array; + bool: Uint8Array; + complex64: Float32Array; + string: string[]; +} + +/** + * Decode flat ArrayBuffer as weights. + * + * This function does not handle sharding. + * + * This function is the reverse of `encodeWeights`. + * + * @param buffer A flat ArrayBuffer carrying the binary values of the tensors + * concatenated in the order specified in `specs`. + * @param specs Specifications of the names, dtypes and shapes of the tensors + * whose value are encoded by `buffer`. + * @return A map from tensor name to tensor value, with the names corresponding + * to names in `specs`. + * @throws Error, if any of the tensors has unsupported dtype. 
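Since every module entry in `Config` is consumed as a deep partial, a caller supplies only the fields to override and everything else keeps the documented defaults. A minimal sketch (illustrative only; the relative import path mirrors the demo code earlier in this diff):

```ts
import { Human } from '../../dist/human.esm.js';

// only overridden fields are supplied; all others keep defaults from src/config.ts#L262
const human = new Human({
  modelBasePath: '../../models',
  debug: false,
  face: { enabled: true },
  body: { enabled: false },
  hand: { enabled: false },
});
```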
+ */ +declare function decodeWeights(buffer: ArrayBuffer, specs: WeightsManifestEntry[]): NamedTensorMap; + +/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */ +export declare const defaults: Config; + +/** Face descriptor type as number array */ +export declare type Descriptor = Array<number>; + +/** Calculates distance between two descriptors + * @param options - calculation options + * - order - algorithm to use + * Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2 + * - multiplier - by how much to enhance difference analysis in range of 1..100 + * default is 20, which normalizes results so that similarity above 0.5 can be considered a match + */ +declare function distance(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number; + +declare namespace draw { + export { + gesture, + face, + body, + hand, + object, + person, + canvas, + all, + DrawOptions, + options + } +} + +/** Draw Options + * - Accessed via `human.draw.options` or provided per each draw method as the drawOptions optional parameter + */ +export declare type DrawOptions = { + /** draw line color */ + color: string; + /** label color */ + labelColor: string; + /** label shadow color */ + shadowColor: string; + /** label font */ + font: string; + /** line spacing between labels */ + lineHeight: number; + /** line width for drawn lines */ + lineWidth: number; + /** size of drawn points */ + pointSize: number; + /** draw rounded boxes by n pixels */ + roundRect: number; + /** should points be drawn? */ + drawPoints: boolean; + /** should labels be drawn? */ + drawLabels: boolean; + /** should detected gestures be drawn? */ + drawGestures: boolean; + /** should draw boxes around detection results? */ + drawBoxes: boolean; + /** should draw polygons from detection points? */ + drawPolygons: boolean; + /** should draw gaze arrows? */ + drawGaze: boolean; + /** should fill polygons? */ + fillPolygons: boolean; + /** use z-coordinate when available */ + useDepth: boolean; + /** should lines be curved? */ + useCurves: boolean; +}; + +export declare type Emotion = 'angry' | 'disgust' | 'fear' | 'happy' | 'sad' | 'surprise' | 'neutral'; + +/** + * Encode a map from names to weight values as an ArrayBuffer, along with an + * `Array` of `WeightsManifestEntry` as specification of the encoded weights. + * + * This function does not perform sharding. + * + * This function is the reverse of `decodeWeights`. + * + * @param tensors A map ("dict") from names to tensors. + * @param group Group to which the weights belong (optional). + * @returns A `Promise` of + * - A flat `ArrayBuffer` with all the binary values of the `Tensor`s + * concatenated. + * - An `Array` of `WeightManifestEntry`s, carrying information including + * tensor names, `dtype`s and shapes. + * @throws Error: on unsupported tensor `dtype`. 
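The two ways of using `DrawOptions` compose: mutate `human.draw.options` for global defaults, or pass a `Partial<DrawOptions>` per call. A short sketch, continuing the `human` instance from the earlier example (element ids and values are assumptions):

```ts
// Global defaults: affect every subsequent draw call.
human.draw.options.lineWidth = 2;
human.draw.options.drawGaze = true;

// Per-call override: third argument is a Partial<DrawOptions>.
const out = document.getElementById('output') as HTMLCanvasElement;
await human.draw.face(out, human.result.face, { drawPolygons: false, color: 'rgba(0, 255, 0, 0.6)' });
```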
+ */ +declare function encodeWeights(tensors: NamedTensorMap | NamedTensor[], group?: WeightGroup): Promise<{ + data: ArrayBuffer; + specs: WeightsManifestEntry[]; +}>; + +/** Env class that holds detected capabilities */ +export declare class Env { + /** Running in Browser */ + browser: boolean; + /** Running in NodeJS */ + node: boolean; + /** Running in WebWorker thread */ + worker: boolean; + /** Detected platform */ + platform: string; + /** Detected agent */ + agent: string; + /** List of supported backends */ + backends: string[]; + /** Has any work been performed so far */ + initial: boolean; + /** Are image filters supported? */ + filter: boolean | undefined; + /** TFJS instance details */ + tfjs: { + version: undefined | string; + }; + /** Is offscreenCanvas supported? */ + offscreen: undefined | boolean; + /** Are performance counter instant values or additive */ + perfadd: boolean; + /** WASM detected capabilities */ + wasm: { + supported: undefined | boolean; + backend: undefined | boolean; + simd: undefined | boolean; + multithread: undefined | boolean; + }; + /** WebGL detected capabilities */ + webgl: { + supported: undefined | boolean; + backend: undefined | boolean; + version: undefined | string; + renderer: undefined | string; + }; + /** WebGPU detected capabilities */ + webgpu: { + supported: undefined | boolean; + backend: undefined | boolean; + adapter: undefined | string; + }; + /** CPU info */ + cpu: { + model: undefined | string; + flags: string[]; + }; + /** List of supported kernels for current backend */ + kernels: string[]; + /** MonkeyPatch for Canvas */ + Canvas: undefined; + /** MonkeyPatch for Image */ + Image: undefined; + /** MonkeyPatch for ImageData */ + ImageData: undefined; + constructor(); + /** update backend information */ + updateBackend(): Promise<void>; + /** update cpu information */ + updateCPU(): Promise<void>; +} + +export declare const env: Env; + +/** Events dispatched by `human.events` + * - `create`: triggered when Human object is instantiated + * - `load`: triggered when models are loaded (explicitly or on-demand) + * - `image`: triggered when input image is processed + * - `result`: triggered when detection is complete + * - `warmup`: triggered when warmup is complete + * - `error`: triggered on some errors + */ +export declare type Events = 'create' | 'load' | 'image' | 'result' | 'warmup' | 'error'; + +/** Defines possible externally defined canvas */ +export declare type ExternalCanvas = typeof env.Canvas; + +/** draw detected faces */ +declare function face(inCanvas: AnyCanvas, result: Array<FaceResult>, drawOptions?: Partial<DrawOptions>): Promise<void>; + +/** Anti-spoofing part of face configuration */ +export declare interface FaceAntiSpoofConfig extends GenericConfig { +} + +/** Configures all face-specific options: face detection, mesh analysis, age, gender, emotion detection and face description */ +export declare interface FaceConfig extends GenericConfig { + detector: Partial<FaceDetectorConfig>; + mesh: Partial<FaceMeshConfig>; + iris: Partial<FaceIrisConfig>; + description: Partial<FaceDescriptionConfig>; + emotion: Partial<FaceEmotionConfig>; + antispoof: Partial<FaceAntiSpoofConfig>; + liveness: Partial<FaceLivenessConfig>; +} + +/** Description or face embedding part of face configuration + * - also used by age and gender detection + */ +export declare interface FaceDescriptionConfig extends GenericConfig { + /** minimum confidence for a detected face before results are discarded */ + minConfidence: number; +} + +/** Detector part of face configuration */ +export declare interface FaceDetectorConfig extends GenericConfig { + /** is face rotation correction performed after detecting face? 
+ * used to correctly analyze faces under high angles + */ + rotation: boolean; + /** maximum number of detected faces */ + maxDetected: number; + /** minimum confidence for a detected face before results are discarded */ + minConfidence: number; + /** minimum overlap between two detected faces before one is discarded */ + iouThreshold: number; + /** should child models perform on masked image of a face */ + mask: boolean; + /** should face detection return processed and cropped face tensor that can be used with an external model for additional processing? + * if enabled it must be manually deallocated to avoid memory leak */ + return: boolean; +} + +/** Emotion part of face configuration */ +export declare interface FaceEmotionConfig extends GenericConfig { + /** minimum confidence for a detected face before results are discarded */ + minConfidence: number; +} + +/** face gesture type */ +export declare type FaceGesture = `facing ${'left' | 'center' | 'right'}` | `blink ${'left' | 'right'} eye` | `mouth ${number}% open` | `head ${'up' | 'down'}`; + +/** Iris part of face configuration */ +export declare interface FaceIrisConfig extends GenericConfig { +} + +export declare type FaceLandmark = 'leftEye' | 'rightEye' | 'nose' | 'mouth' | 'leftEar' | 'rightEar' | 'symmetryLine' | 'silhouette' | 'lipsUpperOuter' | 'lipsLowerOuter' | 'lipsUpperInner' | 'lipsLowerInner' | 'rightEyeUpper0' | 'rightEyeLower0' | 'rightEyeUpper1' | 'rightEyeLower1' | 'rightEyeUpper2' | 'rightEyeLower2' | 'rightEyeLower3' | 'rightEyebrowUpper' | 'rightEyebrowLower' | 'rightEyeIris' | 'leftEyeUpper0' | 'leftEyeLower0' | 'leftEyeUpper1' | 'leftEyeLower1' | 'leftEyeUpper2' | 'leftEyeLower2' | 'leftEyeLower3' | 'leftEyebrowUpper' | 'leftEyebrowLower' | 'leftEyeIris' | 'midwayBetweenEyes' | 'noseTip' | 'noseBottom' | 'noseRightCorner' | 'noseLeftCorner' | 'rightCheek' | 'leftCheek'; + +/** Liveness part of face configuration */ +export declare interface FaceLivenessConfig extends GenericConfig { +} + +/** Mesh part of face configuration */ +export declare interface FaceMeshConfig extends GenericConfig { +} + +/** Face results + * - Combined results of face detector, face mesh, age, gender, emotion, embedding, iris models + * - Some values may be null if specific model is not enabled + */ +export declare interface FaceResult { + /** face id */ + id: number; + /** overall face score */ + score: number; + /** detection score */ + boxScore: number; + /** mesh score */ + faceScore: number; + /** detected face box */ + box: Box; + /** detected face box normalized to 0..1 */ + boxRaw: Box; + /** detected face mesh */ + mesh: Array<Point>; + /** detected face mesh normalized to 0..1 */ + meshRaw: Array<Point>; + /** mesh keypoints combined into annotated results */ + annotations: Record<FaceLandmark, Point[]>; + /** detected age */ + age?: number; + /** detected gender */ + gender?: Gender; + /** gender detection score */ + genderScore?: number; + /** detected emotions */ + emotion?: Array<{ + score: number; + emotion: Emotion; + }>; + /** detected race */ + race?: Array<{ + score: number; + race: Race; + }>; + /** face descriptor */ + embedding?: Array<number>; + /** face iris distance from camera */ + iris?: number; + /** face anti-spoofing result confidence */ + real?: number; + /** face liveness result confidence */ + live?: number; + /** face rotation details */ + rotation?: { + angle: { + roll: number; + yaw: number; + pitch: number; + }; + matrix: [number, number, number, number, number, number, number, number, number]; + gaze: { + bearing: number; + strength: number; + }; + 
} | null; + /** detected face as tensor that can be used in further pipelines */ + tensor?: Tensor; +} + +/** Run input through image filters before inference + * - available only in Browser environments + * - image filters run with near-zero latency as they are executed on the GPU using WebGL + */ +export declare interface FilterConfig { + /** are image filters enabled? */ + enabled: boolean; + /** perform image histogram equalization + * - equalization is performed on input as a whole and detected face before it's passed for further analysis + */ + equalization: boolean; + /** resize input width + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + width: number; + /** resize input height + * - if both width and height are set to 0, there is no resizing + * - if just one is set, second one is scaled automatically + * - if both are set, values are used as-is + */ + height: number; + /** return processed canvas imagedata in result */ + return: boolean; + /** flip input as mirror image */ + flip: boolean; + /** range: -1 (darken) to 1 (lighten) */ + brightness: number; + /** range: -1 (reduce contrast) to 1 (increase contrast) */ + contrast: number; + /** range: 0 (no sharpening) to 1 (maximum sharpening) */ + sharpness: number; + /** range: 0 (no blur) to N (blur radius in pixels) */ + blur: number; + /** range: -1 (reduce saturation) to 1 (increase saturation) */ + saturation: number; + /** range: 0 (no change) to 360 (hue rotation in degrees) */ + hue: number; + /** image negative */ + negative: boolean; + /** image sepia colors */ + sepia: boolean; + /** image vintage colors */ + vintage: boolean; + /** image kodachrome colors */ + kodachrome: boolean; + /** image technicolor colors */ + technicolor: boolean; + /** image polaroid camera effect */ + polaroid: boolean; + /** range: 0 (no pixelate) to N (number of pixels to pixelate) */ + pixelate: number; +} + +export declare type Finger = 'index' | 'middle' | 'pinky' | 'ring' | 'thumb' | 'palm'; + +export declare type FingerCurl = 'none' | 'half' | 'full'; + +export declare type FingerDirection = 'verticalUp' | 'verticalDown' | 'horizontalLeft' | 'horizontalRight' | 'diagonalUpRight' | 'diagonalUpLeft' | 'diagonalDownRight' | 'diagonalDownLeft'; + +/** + * Creates an IOHandler that loads model artifacts from memory. + * + * When used in conjunction with `tf.loadLayersModel`, an instance of + * `tf.LayersModel` (Keras-style) can be constructed from the loaded artifacts. + * + * ```js + * const model = await tf.loadLayersModel(tf.io.fromMemory( + * modelTopology, weightSpecs, weightData)); + * ``` + * + * @param modelArtifacts an object containing model topology (i.e., parsed from + * the JSON format). + * @param weightSpecs An array of `WeightsManifestEntry` objects describing the + * names, shapes, types, and quantization of the weight data. + * @param weightData A single `ArrayBuffer` containing the weight data, + * concatenated in the order described by the weightSpecs. + * @param trainingConfig Model training configuration. Optional. + * + * @returns A passthrough `IOHandler` that simply loads the provided data. 
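Because `FilterConfig` is plain data inside `Config`, filters can be toggled at runtime between detect calls. A brief sketch, continuing the `human` instance from the earlier configuration example (values are illustrative):

```ts
// Filters execute on the GPU via WebGL before inference, so they are cheap.
human.config.filter.enabled = true;
human.config.filter.equalization = true; // histogram equalization of input and face crop
human.config.filter.brightness = 0.1;    // -1 (darken) .. 1 (lighten)
human.config.filter.flip = true;         // mirror webcam-style input
```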
+ */ +declare function fromMemory(modelArtifacts: {} | ModelArtifacts, weightSpecs?: WeightsManifestEntry[], weightData?: ArrayBuffer, trainingConfig?: TrainingConfig): IOHandler; + +export declare type Gender = 'male' | 'female' | 'unknown'; + +/** Generic config type inherited by all module types */ +export declare interface GenericConfig { + /** is module enabled? */ + enabled: boolean; + /** path to model json file (relative to `modelBasePath`) */ + modelPath: string; + /** how many max frames to go without re-running model if cached results are acceptable + * for two-phase models such as face and hand caching applies to bounding boxes detection only */ + skipFrames: number; + /** how many max milliseconds to go without re-running model if cached results are acceptable + * for two-phase models such as face and hand caching applies to bounding boxes detection only */ + skipTime: number; +} + +/** draw detected gestures */ +declare function gesture(inCanvas: AnyCanvas, result: Array<GestureResult>, drawOptions?: Partial<DrawOptions>): Promise<void>; + +/** Controls gesture detection */ +export declare interface GestureConfig { + /** is gesture detection enabled? */ + enabled: boolean; +} + +/** Gesture combined results + * Each result has: + * - part: part name and number where gesture was detected: `face`, `iris`, `body`, `hand` + * - gesture: gesture detected + */ +export declare type GestureResult = { + 'face': number; + gesture: FaceGesture; +} | { + 'iris': number; + gesture: IrisGesture; +} | { + 'body': number; + gesture: BodyGesture; +} | { + 'hand': number; + gesture: HandGesture; +}; + +declare const getLoadHandlers: (url: string | string[], loadOptions?: LoadOptions) => IOHandler[]; + +/** + * Create `ModelArtifacts` from a JSON file. + * + * @param modelJSON Object containing the parsed JSON of `model.json` + * @param loadWeights Function that takes the JSON file's weights manifest, + * reads weights from the listed path(s), and returns a Promise of the + * weight manifest entries along with the weights data. + * @returns A Promise of the `ModelArtifacts`, as described by the JSON file. + */ +declare function getModelArtifactsForJSON(modelJSON: ModelJSON, loadWeights: (weightsManifest: WeightsManifestConfig) => Promise<[WeightsManifestEntry[], /* weightData */ ArrayBuffer]>): Promise<ModelArtifacts>; + +/** + * Populate ModelArtifactsInfo fields for a model with JSON topology. + * @param modelArtifacts + * @returns A ModelArtifactsInfo object. + */ +declare function getModelArtifactsInfoForJSON(modelArtifacts: ModelArtifacts): ModelArtifactsInfo; + +declare const getSaveHandlers: (url: string | string[]) => IOHandler[]; + +declare interface GPUData { + tensorRef: Tensor; + texture?: WebGLTexture; + texShape?: [number, number]; +} + +/** + * A `tf.GraphModel` is a directed, acyclic graph built from a + * SavedModel GraphDef and allows inference execution. + * + * A `tf.GraphModel` can only be created by loading from a model converted from + * a [TensorFlow SavedModel](https://www.tensorflow.org/guide/saved_model) using + * the command line converter tool and loaded via `tf.loadGraphModel`. 
+ * + * @doc {heading: 'Models', subheading: 'Classes'} + */ +export declare class GraphModel implements InferenceModel { + private modelUrl; + private loadOptions; + private executor; + private version; + private handler; + private artifacts; + private initializer; + private resourceManager; + private signature; + readonly modelVersion: string; + readonly inputNodes: string[]; + readonly outputNodes: string[]; + readonly inputs: TensorInfo[]; + readonly outputs: TensorInfo[]; + readonly weights: NamedTensorsMap; + readonly metadata: {}; + readonly modelSignature: {}; + /** + * @param modelUrl url for the model, or an `io.IOHandler`. + * @param weightManifestUrl url for the weight file generated by + * scripts/convert.py script. + * @param requestOption options for Request, which allows to send credentials + * and custom headers. + * @param onProgress Optional, progress callback function, fired periodically + * before the load is completed. + */ + constructor(modelUrl: string | io.IOHandler, loadOptions?: io.LoadOptions); + private findIOHandler; + /** + * Loads the model and weight files, construct the in memory weight map and + * compile the inference graph. + */ + load(): Promise<boolean>; + /** + * Synchronously construct the in memory weight map and + * compile the inference graph. Also initialize hashtable if any. + * + * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true} + */ + loadSync(artifacts: io.ModelArtifacts): boolean; + /** + * Save the configuration and/or weights of the GraphModel. + * + * An `IOHandler` is an object that has a `save` method of the proper + * signature defined. The `save` method manages the storing or + * transmission of serialized data ("artifacts") that represent the + * model's topology and weights onto or via a specific medium, such as + * file downloads, local storage, IndexedDB in the web browser and HTTP + * requests to a server. TensorFlow.js provides `IOHandler` + * implementations for a number of frequently used saving mediums, such as + * `tf.io.browserDownloads` and `tf.io.browserLocalStorage`. See `tf.io` + * for more details. + * + * This method also allows you to refer to certain types of `IOHandler`s + * as URL-like string shortcuts, such as 'localstorage://' and + * 'indexeddb://'. + * + * Example 1: Save `model`'s topology and weights to browser [local + * storage](https://developer.mozilla.org/en-US/docs/Web/API/Window/localStorage); + * then load it back. + * + * ```js + * const modelUrl = + * 'https://storage.googleapis.com/tfjs-models/savedmodel/mobilenet_v2_1.0_224/model.json'; + * const model = await tf.loadGraphModel(modelUrl); + * const zeros = tf.zeros([1, 224, 224, 3]); + * model.predict(zeros).print(); + * + * const saveResults = await model.save('localstorage://my-model-1'); + * + * const loadedModel = await tf.loadGraphModel('localstorage://my-model-1'); + * console.log('Prediction from loaded model:'); + * model.predict(zeros).print(); + * ``` + * + * @param handlerOrURL An instance of `IOHandler` or a URL-like, + * scheme-based string shortcut for `IOHandler`. + * @param config Options for saving the model. + * @returns A `Promise` of `SaveResult`, which summarizes the result of + * the saving, such as byte sizes of the saved artifacts for the model's + * topology and weight values. + * + * @doc {heading: 'Models', subheading: 'Classes', ignoreCI: true} + */ + save(handlerOrURL: io.IOHandler | string, config?: io.SaveConfig): Promise<io.SaveResult>; + /** + * Execute the inference for the input tensors. 
+ * + * @param input The input tensors, when there is single input for the model, + * inputs param should be a `tf.Tensor`. For models with multiple inputs, + * inputs params should be in either `tf.Tensor`[] if the input order is + * fixed, or otherwise NamedTensorMap format. + * + * For model with multiple inputs, we recommend you use NamedTensorMap as the + * input type, if you use `tf.Tensor`[], the order of the array needs to + * follow the + * order of inputNodes array. @see {@link GraphModel.inputNodes} + * + * You can also feed any intermediate nodes using the NamedTensorMap as the + * input type. For example, given the graph + * InputNode => Intermediate => OutputNode, + * you can execute the subgraph Intermediate => OutputNode by calling + * model.execute({'IntermediateNode': tf.tensor(...)}); + * + * This is useful for models that use tf.dynamic_rnn, where the intermediate + * state needs to be fed manually. + * + * For batch inference execution, the tensors for each input need to be + * concatenated together. For example with mobilenet, the required input shape + * is [1, 244, 244, 3], which represents the [batch, height, width, channel]. + * If we provide batched data of 100 images, the input tensor should be + * in the shape of [100, 244, 244, 3]. + * + * @param config Prediction configuration for specifying the batch size and + * output node names. Currently the batch size option is ignored for graph + * model. + * + * @returns Inference result tensors. The output would be single `tf.Tensor` + * if model has single output node, otherwise Tensor[] or NamedTensorMap[] + * will be returned for model with multiple outputs. + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + predict(inputs: Tensor | Tensor[] | NamedTensorMap, config?: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap; + private normalizeInputs; + private normalizeOutputs; + /** + * Executes inference for the model for given input tensors. + * @param inputs tensor, tensor array or tensor map of the inputs for the + * model, keyed by the input node names. + * @param outputs output node name from the Tensorflow model, if no + * outputs are specified, the default outputs of the model would be used. + * You can inspect intermediate nodes of the model by adding them to the + * outputs array. + * + * @returns A single tensor if provided with a single output or no outputs + * are provided and there is only one default output, otherwise return a + * tensor array. The order of the tensor array is the same as the outputs + * if provided, otherwise the order of outputNodes attribute of the model. + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Tensor | Tensor[]; + /** + * Executes inference for the model for given input tensors in async + * fashion, use this method when your model contains control flow ops. + * @param inputs tensor, tensor array or tensor map of the inputs for the + * model, keyed by the input node names. + * @param outputs output node name from the Tensorflow model, if no outputs + * are specified, the default outputs of the model would be used. You can + * inspect intermediate nodes of the model by adding them to the outputs + * array. + * + * @returns A Promise of single tensor if provided with a single output or + * no outputs are provided and there is only one default output, otherwise + * return a tensor map. 
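To make the batching and intermediate-node notes above concrete, a short sketch (the model URL and node names are hypothetical placeholders):

```ts
import * as tf from '@tensorflow/tfjs';

const model = await tf.loadGraphModel('https://example.com/model/model.json'); // hypothetical URL

// Batched inference: stack N inputs along axis 0 => [N, height, width, channels].
const batch = tf.zeros([100, 244, 244, 3]);
const batched = model.execute(batch) as tf.Tensor;

// Feeding an intermediate node instead of the graph input:
const partial = model.execute({ IntermediateNode: tf.zeros([1, 64]) }, 'OutputNode');
```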
+ * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + executeAsync(inputs: Tensor | Tensor[] | NamedTensorMap, outputs?: string | string[]): Promise<Tensor | Tensor[]>; + /** + * Get intermediate tensors for model debugging mode (flag + * KEEP_INTERMEDIATE_TENSORS is true). + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + getIntermediateTensors(): NamedTensorsMap; + /** + * Dispose intermediate tensors for model debugging mode (flag + * KEEP_INTERMEDIATE_TENSORS is true). + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + disposeIntermediateTensors(): void; + private convertTensorMapToTensorsMap; + /** + * Releases the memory used by the weight tensors and resourceManager. + * + * @doc {heading: 'Models', subheading: 'Classes'} + */ + dispose(): void; +} + +/** draw detected hands */ +declare function hand(inCanvas: AnyCanvas, result: Array<HandResult>, drawOptions?: Partial<DrawOptions>): Promise<void>; + +/** Configures all hand detection specific options */ +export declare interface HandConfig extends GenericConfig { + /** should hand rotation correction be performed after hand detection? */ + rotation: boolean; + /** minimum confidence for a detected hand before results are discarded */ + minConfidence: number; + /** minimum overlap between two detected hands before one is discarded */ + iouThreshold: number; + /** maximum number of detected hands */ + maxDetected: number; + /** should hand landmarks be detected or just return detected hand box */ + landmarks: boolean; + detector: { + /** path to hand detector model json */ + modelPath?: string; + }; + skeleton: { + /** path to hand skeleton model json */ + modelPath?: string; + }; +} + +/** hand gesture type */ +export declare type HandGesture = `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} forward` | `${'thumb' | 'index' | 'middle' | 'ring' | 'pinky'} up` | 'victory' | 'thumbs up'; + +/** Hand results */ +export declare interface HandResult { + /** hand id */ + id: number; + /** hand overall score */ + score: number; + /** hand detection score */ + boxScore: number; + /** hand skeleton score */ + fingerScore: number; + /** detected hand box */ + box: Box; + /** detected hand box normalized to 0..1 */ + boxRaw: Box; + /** detected hand keypoints */ + keypoints: Array<Point>; + /** detected hand class */ + label: HandType; + /** detected hand keypoints combined into annotated parts */ + annotations: Record<Finger, Array<Point>>; + /** detected hand parts annotated with part gestures */ + landmarks: Record<Finger, { curl: FingerCurl; direction: FingerDirection }>; +} + +export declare type HandType = 'hand' | 'fist' | 'pinch' | 'point' | 'face' | 'tip' | 'pinchtip'; + +/** + * Creates an IOHandler subtype that sends model artifacts to HTTP server. + * + * An HTTP request of the `multipart/form-data` mime type will be sent to the + * `path` URL. The form data includes artifacts that represent the topology + * and/or weights of the model. In the case of Keras-style `tf.Model`, two + * blobs (files) exist in form-data: + * - A JSON file consisting of `modelTopology` and `weightsManifest`. + * - A binary weights file consisting of the concatenated weight values. + * These files are in the same format as the one generated by + * [tfjs_converter](https://js.tensorflow.org/tutorials/import-keras.html). 
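Once detection has run, the `HandResult` fields above can be read directly. A sketch continuing the `human` instance from earlier (`inputImage` is assumed to be any valid `Input`, and the `landmarks` shape follows the reconstructed `Record<Finger, ...>` type above):

```ts
const result = await human.detect(inputImage);
for (const hand of result.hand) {
  console.log(`hand #${hand.id}: label=${hand.label} score=${hand.score.toFixed(2)}`);
  console.log('thumb curl:', hand.landmarks.thumb.curl);           // 'none' | 'half' | 'full'
  console.log('index direction:', hand.landmarks.index.direction); // e.g. 'verticalUp'
}
```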
+ * + * The following code snippet exemplifies the client-side code that uses this + * function: + * + * ```js + * const model = tf.sequential(); + * model.add( + * tf.layers.dense({units: 1, inputShape: [100], activation: 'sigmoid'})); + * + * const saveResult = await model.save(tf.io.http( + * 'http://model-server:5000/upload', {requestInit: {method: 'PUT'}})); + * console.log(saveResult); + * ``` + * + * If the default `POST` method is to be used, without any custom parameters + * such as headers, you can simply pass an HTTP or HTTPS URL to `model.save`: + * + * ```js + * const saveResult = await model.save('http://model-server:5000/upload'); + * ``` + * + * The following GitHub Gist + * https://gist.github.com/dsmilkov/1b6046fd6132d7408d5257b0976f7864 + * implements a server based on [flask](https://github.com/pallets/flask) that + * can receive the request. Upon receiving the model artifacts via the request, + * this particular server reconstitutes instances of [Keras + * Models](https://keras.io/models/model/) in memory. + * + * + * @param path A URL path to the model. + * Can be an absolute HTTP path (e.g., + * 'http://localhost:8000/model-upload') or a relative path (e.g., + * './model-upload'). + * @param requestInit Request configurations to be used when sending + * HTTP request to server using `fetch`. It can contain fields such as + * `method`, `credentials`, `headers`, `mode`, etc. See + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request + * for more information. `requestInit` must not have a body, because the + * body will be set by TensorFlow.js. File blobs representing the model + * topology (filename: 'model.json') and the weights of the model (filename: + * 'model.weights.bin') will be appended to the body. If `requestInit` has a + * `body`, an Error will be thrown. + * @param loadOptions Optional configuration for the loading. It includes the + * following fields: + * - weightPathPrefix Optional, this specifies the path prefix for weight + * files, by default this is calculated from the path param. + * - fetchFunc Optional, custom `fetch` function. E.g., in Node.js, + * the `fetch` from node-fetch can be used here. + * - onProgress Optional, progress callback function, fired periodically + * before the load is completed. + * @returns An instance of `IOHandler`. 
+ * + * @doc { + * heading: 'Models', + * subheading: 'Loading', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function http(path: string, loadOptions?: LoadOptions): IOHandler; + +/** **Human** library main class + * + * All methods and properties are available only as members of Human class + * + * - Configuration object definition: {@link Config} + * - Results object definition: {@link Result} + * - Possible inputs: {@link Input} + * + * @param userConfig - {@link Config} + * @returns instance of {@link Human} + */ +declare class Human { + #private; + /** Current version of Human library in *semver* format */ + version: string; + /** Current configuration + * - Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) + */ + config: Config; + /** Last known result of detect run + * - Can be accessed anytime after initial detection + */ + result: Result; + /** Current state of Human library + * - Can be polled to determine operations that are currently executed + * - Progresses through: 'config', 'check', 'backend', 'load', 'run:<model>', 'idle' + */ + state: string; + /** currently processed image tensor and canvas */ + process: { + tensor: Tensor | null; + canvas: AnyCanvas | null; + }; + /** Instance of TensorFlow/JS used by Human + * - Can be embedded or externally provided + * [TFJS API]: {@link https://js.tensorflow.org/api/latest/} + */ + tf: any; + /** Object containing environment information used for diagnostics */ + env: Env; + /** Draw helper classes that can draw detected objects on canvas using specified draw + * - canvas: draws input to canvas + * - options: are global settings for all draw operations, can be overridden for each draw method {@link DrawOptions} + * - face, body, hand, gesture, object, person: draws detected results as overlays on canvas + */ + draw: { + canvas: typeof draw.canvas; + face: typeof draw.face; + body: typeof draw.body; + hand: typeof draw.hand; + gesture: typeof draw.gesture; + object: typeof draw.object; + person: typeof draw.person; + all: typeof draw.all; + options: DrawOptions; + }; + /** Currently loaded models + * @internal + * {@link Models} + */ + models: models.Models; + /** Container for events dispatched by Human + * Possible events: + * - `create`: triggered when Human object is instantiated + * - `load`: triggered when models are loaded (explicitly or on-demand) + * - `image`: triggered when input image is processed + * - `result`: triggered when detection is complete + * - `warmup`: triggered when warmup is complete + * - `error`: triggered on some errors + */ + events: EventTarget | undefined; + /** Reference face triangulation array of 468 points, used for triangle references between points */ + faceTriangulation: number[]; + /** Reference UV map of 468 values, used for 3D mapping of the face mesh */ + faceUVMap: [number, number][]; + /** Performance object that contains values for all recently performed operations */ + performance: Record<string, number>; + /** WebGL debug info */ + gl: Record<string, unknown>; + /** Constructor for **Human** library that is further used for all operations + * @param userConfig - user configuration object {@link Config} + */ + constructor(userConfig?: Partial<Config>); + /** internal function to measure tensor leaks */ + analyze: (...msg: string[]) => void; + /** Reset configuration to default values */ + reset(): void; + /** Validate current configuration schema */ + validate(userConfig?: Partial<Config>): { + reason: string; + where: string; + expected?: string; + }[]; + /** Exports face matching methods {@link 
match#similarity} */ + similarity: typeof match.similarity; + /** Exports face matching methods {@link match#distance} */ + distance: typeof match.distance; + /** Exports face matching methods {@link match#match} */ + match: typeof match.match; + /** Utility wrapper for performance.now() */ + now(): number; + /** Process input and return canvas and tensor + * + * @param input - any input {@link Input} + * @param getTensor - should image processing also return tensor or just canvas + * Returns object with `tensor` and `canvas` + */ + image(input: Input, getTensor?: boolean): Promise<{ + tensor: Tensor | null; + canvas: AnyCanvas | null; + }>; + /** Segmentation method takes any input and returns processed canvas with body segmentation + * - Segmentation is not triggered as part of detect process + * @param input - {@link Input} + * @param background - {@link Input} + * - Optional parameter background is used to fill the background with specific input + * Returns: + * - `data` as raw data array with per-pixel segmentation values + * - `canvas` as canvas which is input image filtered with segmentation data and optionally merged with background image. canvas alpha values are set to segmentation values for easy merging + * - `alpha` as grayscale canvas that represents segmentation alpha values + */ + segmentation(input: Input, background?: Input): Promise<{ + data: number[] | Tensor; + canvas: AnyCanvas | null; + alpha: AnyCanvas | null; + }>; + /** Enhance method performs additional enhancements to face image previously detected for further processing + * + * @param input - Tensor as provided in human.result.face[n].tensor + * @returns Tensor + */ + enhance(input: Tensor): Tensor | null; + /** Compare two input tensors for pixel similarity + * - use `human.image` to process any valid input and get a tensor that can be used for compare + * - when passing manually generated tensors: + * - both input tensors must be in format [1, height, width, 3] + * - if resolution of tensors does not match, second tensor will be resized to match resolution of the first tensor + * - return value is pixel similarity score normalized by input resolution and rgb channels + */ + compare(firstImageTensor: Tensor, secondImageTensor: Tensor): Promise<number>; + /** Explicit backend initialization + * - Normally done implicitly during initial load phase + * - Call to explicitly register and initialize TFJS backend without any other operations + * - Use when changing backend during runtime + */ + init(): Promise<void>; + /** Load method preloads all configured models on-demand + * - Not explicitly required as any required model is loaded implicitly on its first run + * + * @param userConfig - {@link Config} + */ + load(userConfig?: Partial<Config>): Promise<void>; + /** emit event */ + emit: (event: string) => void; + /** Runs interpolation using last known result and returns smoothed result + * Interpolation is based on time since last known result so can be called independently + * + * @param result - {@link Result} optional use specific result set to run interpolation on + * @returns result - {@link Result} + */ + next(result?: Result): Result; + /** Warmup method pre-initializes all configured models for faster inference + * - can take significant time on startup + * - only used for `webgl` and `humangl` backends + * @param userConfig - {@link Config} + * @returns result - {@link Result} + */ + warmup(userConfig?: Partial<Config>): Promise<Result | undefined>; + /** Run detect with tensorflow profiling + * - result object will contain total execution time information for 
top-20 kernels + * - actual detection object can be accessed via `human.result` + */ + profile(input: Input, userConfig?: Partial<Config>): Promise<Record<string, number>>; + /** Main detection method + * - Analyze configuration: {@link Config} + * - Pre-process input: {@link Input} + * - Run inference for all configured models + * - Process and return result: {@link Result} + * + * @param input - {@link Input} + * @param userConfig - {@link Config} + * @returns result - {@link Result} + */ + detect(input: Input, userConfig?: Partial<Config>): Promise<Result>; +} +export { Human } +export default Human; + +/** Defines all possible image objects */ +export declare type ImageObjects = ImageData | ImageBitmap; + +/** + * Common interface for a machine learning model that can do inference. + */ +declare interface InferenceModel { + /** + * Return the array of input tensor info. + */ + readonly inputs: ModelTensorInfo[]; + /** + * Return the array of output tensor info. + */ + readonly outputs: ModelTensorInfo[]; + /** + * Execute the inference for the input tensors. + * + * @param input The input tensors, when there is single input for the model, + * inputs param should be a Tensor. For models with multiple inputs, inputs + * params should be in either Tensor[] if the input order is fixed, or + * otherwise NamedTensorMap format. + * For batch inference execution, the tensors for each input need to be + * concatenated together. For example with mobilenet, the required input shape + * is [1, 244, 244, 3], which represents the [batch, height, width, channel]. + * If we provide batched data of 100 images, the input tensor should be + * in the shape of [100, 244, 244, 3]. + * + * @param config Prediction configuration for specifying the batch size. + * + * @returns Inference result tensors. The output would be single Tensor if + * model has single output node, otherwise Tensor[] or NamedTensorMap[] will + * be returned for model with multiple outputs. + */ + predict(inputs: Tensor | Tensor[] | NamedTensorMap, config: ModelPredictConfig): Tensor | Tensor[] | NamedTensorMap; + /** + * Execute the inference for the input tensors and return activation + * values for specified output node names without batching. + * + * @param input The input tensors, when there is single input for the model, + * inputs param should be a Tensor. For models with multiple inputs, inputs + * params should be in either Tensor[] if the input order is fixed, or + * otherwise NamedTensorMap format. + * + * @param outputs string|string[]. List of output node names to retrieve + * activation from. + * + * @returns Activation values for the output nodes result tensors. The return + * type matches specified parameter outputs type. The output would be single + * Tensor if single output is specified, otherwise Tensor[] for multiple + * outputs. 
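Tying `detect`, `next` and the draw helpers together, a minimal browser loop (a sketch only; the `video` and `canvas` element ids are assumptions):

```ts
const video = document.getElementById('video') as HTMLVideoElement;
const output = document.getElementById('canvas') as HTMLCanvasElement;

async function loop() {
  await human.detect(video);               // run all enabled models on the current frame
  const smooth = human.next(human.result); // time-based interpolation of the last result
  await human.draw.canvas(video, output);  // copy the input frame
  await human.draw.all(output, smooth);    // overlay face/body/hand/gesture results
  requestAnimationFrame(loop);
}
loop();
```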
+ */ + execute(inputs: Tensor | Tensor[] | NamedTensorMap, outputs: string | string[]): Tensor | Tensor[]; +} + +/** Defines all possible input types for **Human** detection */ +export declare type Input = Tensor | AnyCanvas | AnyImage | AnyVideo | ImageObjects | ExternalCanvas; + +declare namespace io { + export { + copyModel, + listModels, + moveModel, + removeModel, + browserFiles, + browserHTTPRequest, + concatenateArrayBuffers, + decodeWeights, + encodeWeights, + fromMemory, + getLoadHandlers, + getModelArtifactsForJSON, + getModelArtifactsInfoForJSON, + getSaveHandlers, + http, + IOHandler, + isHTTPScheme, + LoadHandler, + LoadOptions, + loadWeights, + ModelArtifacts, + ModelArtifactsInfo, + ModelJSON, + ModelStoreManager, + OnProgressCallback, + registerLoadRouter, + registerSaveRouter, + RequestDetails, + SaveConfig, + SaveHandler, + SaveResult, + TrainingConfig, + WeightGroup, + weightsLoaderFactory, + WeightsManifestConfig, + WeightsManifestEntry, + withSaveHandler + } +} + +/** + * Interface for a model import/export handler. + * + * The `save` and `load` handlers are both optional, in order to allow handlers + * that support only saving or loading. + */ +declare interface IOHandler { + save?: SaveHandler; + load?: LoadHandler; +} + +declare type IORouter = (url: string | string[], loadOptions?: LoadOptions) => IOHandler; + +/** iris gesture type */ +export declare type IrisGesture = 'facing center' | `looking ${'left' | 'right' | 'up' | 'down'}` | 'looking center'; + +declare function isHTTPScheme(url: string): boolean; + +/** + * List all models stored in registered storage mediums. + * + * For a web browser environment, the registered mediums are Local Storage and + * IndexedDB. + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Delete the model. + * await tf.io.removeModel('localstorage://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * ``` + * + * @returns A `Promise` of a dictionary mapping URLs of existing models to + * their model artifacts info. URLs include medium-specific schemes, e.g., + * 'indexeddb://my/model/1'. Model artifacts info include type of the + * model's topology, byte sizes of the topology, weights, etc. + * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function listModels(): Promise<{ + [url: string]: ModelArtifactsInfo; +}>; + +/** Load method preloads all configured models on-demand */ +declare function load(instance: Human): Promise<void>; + +/** + * Type definition for handlers of loading operations. + */ +declare type LoadHandler = () => Promise<ModelArtifacts>; + +/** @innamespace io */ +declare interface LoadOptions { + /** + * RequestInit (options) for HTTP requests. + * + * For detailed information on the supported fields, see + * [https://developer.mozilla.org/en-US/docs/Web/API/Request/Request]( + * https://developer.mozilla.org/en-US/docs/Web/API/Request/Request) + */ + requestInit?: RequestInit; + /** + * Progress callback. + */ + onProgress?: OnProgressCallback; + /** + * A function used to override the `window.fetch` function. 
+ */
+ fetchFunc?: Function;
+ /**
+ * Strict model loading: whether extraneous weights or missing
+ * weights should trigger an `Error`.
+ *
+ * If `true`, require that the provided weights exactly match those
+ * required by the layers. `false` means that both extra weights
+ * and missing weights will be silently ignored.
+ *
+ * Default: `true`.
+ */
+ strict?: boolean;
+ /**
+ * Path prefix for weight files, by default this is calculated from the
+ * path of the model JSON file.
+ *
+ * For instance, if the path to the model JSON file is
+ * `http://localhost/foo/model.json`, then the default path prefix will be
+ * `http://localhost/foo/`. If a weight file has the path value
+ * `group1-shard1of2` in the weight manifest, then the weight file will be
+ * loaded from `http://localhost/foo/group1-shard1of2` by default. However,
+ * if you provide a `weightPathPrefix` value of
+ * `http://localhost/foo/alt-weights`, then the weight file will be loaded
+ * from the path `http://localhost/foo/alt-weights/group1-shard1of2` instead.
+ */
+ weightPathPrefix?: string;
+ /**
+ * Whether the module or model is to be loaded from TF Hub.
+ *
+ * Setting this to `true` allows passing a TF-Hub module URL, omitting the
+ * standard model file name and the query parameters.
+ *
+ * Default: `false`.
+ */
+ fromTFHub?: boolean;
+ /**
+ * An async function to convert weight file name to URL. The weight file
+ * names are stored in model.json's weightsManifest.paths field. By default we
+ * consider weight files to be colocated with the model.json file. For example:
+ * model.json URL: https://www.google.com/models/1/model.json
+ * group1-shard1of1.bin url:
+ * https://www.google.com/models/1/group1-shard1of1.bin
+ *
+ * With this function you can convert the weight file name to any URL.
+ */
+ weightUrlConverter?: (weightFileName: string) => Promise<string>;
+}
+
+/**
+ * Reads a weights manifest JSON configuration, fetches the weights and
+ * returns them as `Tensor`s.
+ *
+ * @param manifest The weights manifest JSON.
+ * @param filePathPrefix The path prefix for filenames given in the manifest.
+ * Defaults to the empty string.
+ * @param weightNames The names of the weights to be fetched.
+ */
+declare function loadWeights(manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[], requestInit?: RequestInit): Promise<NamedTensorMap>;
+
+declare namespace match {
+ export {
+ distance,
+ similarity,
+ match_2 as match,
+ Descriptor,
+ MatchOptions
+ }
+}
+
+/** Matches a given descriptor to the closest entry in an array of descriptors
+ * @param descriptor - face descriptor
+ * @param descriptors - array of face descriptors to compare the given descriptor to
+ * @param options - see {@link similarity}
+ * Returns
+ * - `index` array index where the best match was found, or -1 if there are no matches
+ * - {@link distance} calculated `distance` of the given descriptor to the best match
+ * - {@link similarity} calculated normalized `similarity` of the given descriptor to the best match
+ */
+declare function match_2(descriptor: Descriptor, descriptors: Array<Descriptor>, options?: MatchOptions): {
+ index: number;
+ distance: number;
+ similarity: number;
+};
+
+declare type MatchOptions = {
+ order?: number;
+ threshold?: number;
+ multiplier?: number;
+ min?: number;
+ max?: number;
+} | undefined;
+
+/**
+ * The serialized artifacts of a model, including topology and weights.
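A hedged usage sketch for the `match` function declared above, written against the `Human` instance API (the namespace above re-exports `match_2` as `match`); the package import path, the enrolled-descriptor array, and the detection flow are assumptions:

```ts
import { Human } from '@vladmandic/human';

const human = new Human();
const db: Array<number[]> = []; // hypothetical enrolled face descriptors

async function identify(image: HTMLImageElement) {
  const result = await human.detect(image);
  const descriptor = result.face[0]?.embedding; // descriptor of the first detected face
  if (!descriptor) return null;
  const best = human.match(descriptor, db, { order: 2, multiplier: 25 });
  // index of -1 means no match was found
  return best.index >= 0 ? { index: best.index, similarity: best.similarity } : null;
}
```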
+ *
+ * The `modelTopology`, `trainingConfig`, `weightSpecs` and `weightData` fields
+ * of this interface are optional, in order to support topology- or weights-only
+ * saving and loading.
+ *
+ * Note this interface is used internally in IOHandlers. For the file format
+ * written to disk as `model.json`, see `ModelJSON`.
+ */
+declare interface ModelArtifacts {
+ /**
+ * Model topology.
+ *
+ * For Keras-style `tf.Model`s, this is a JSON object.
+ * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON
+ * encoding of the `GraphDef` protocol buffer.
+ */
+ modelTopology?: {} | ArrayBuffer;
+ /**
+ * Serialized configuration for the model's training.
+ */
+ trainingConfig?: TrainingConfig;
+ /**
+ * Weight specifications.
+ *
+ * This corresponds to the weightsData below.
+ */
+ weightSpecs?: WeightsManifestEntry[];
+ /**
+ * Binary buffer for all weight values concatenated in the order specified
+ * by `weightSpecs`.
+ */
+ weightData?: ArrayBuffer;
+ /**
+ * Hard-coded format name for models saved from TensorFlow.js or converted
+ * by TensorFlow.js Converter.
+ */
+ format?: string;
+ /**
+ * What library is responsible for originally generating this artifact.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'.
+ */
+ generatedBy?: string;
+ /**
+ * What library or tool is responsible for converting the original model
+ * to this format, applicable only if the model is output by a converter.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'.
+ *
+ * A value of `null` means the model artifacts are generated without any
+ * conversion process (e.g., saved directly from a TensorFlow.js
+ * `tf.LayersModel` instance.)
+ */
+ convertedBy?: string | null;
+ /**
+ * Inputs and outputs signature for saved model.
+ */
+ signature?: {};
+ /**
+ * User-defined metadata about the model.
+ */
+ userDefinedMetadata?: {
+ [key: string]: {};
+ };
+ /**
+ * Initializer for the model.
+ */
+ modelInitializer?: {};
+}
+
+declare interface ModelArtifactsInfo {
+ /**
+ * Timestamp for when the model is saved.
+ */
+ dateSaved: Date;
+ /**
+ * TODO (cais,yassogba) consider removing GraphDef as GraphDefs now
+ * come in a JSON format and none of our IOHandlers support a non json
+ * format. We could consider replacing this with 'Binary' if we want to
+ * allow future handlers to save to non json formats (though they will
+ * probably want more information than 'Binary').
+ *
+ * Type of the model topology
+ *
+ * Possible values:
+ * - JSON: JSON config (human-readable, e.g., Keras JSON).
+ * - GraphDef: TensorFlow
+ * [GraphDef](https://www.tensorflow.org/extend/tool_developers/#graphdef)
+ * protocol buffer (binary).
+ */
+ modelTopologyType: 'JSON' | 'GraphDef';
+ /**
+ * Size of model topology (Keras JSON or GraphDef), in bytes.
+ */
+ modelTopologyBytes?: number;
+ /**
+ * Size of weight specification or manifest, in bytes.
+ */
+ weightSpecsBytes?: number;
+ /**
+ * Size of weight value data, in bytes.
+ */
+ weightDataBytes?: number;
+}
+
+/**
+ * The on-disk format of the `model.json` file.
+ *
+ * TF.js 1.0 always populates the optional fields when writing model.json.
+ * Prior versions did not provide those fields.
+ */
+declare interface ModelJSON {
+ /**
+ * Model topology.
+ *
+ * For Keras-style `tf.Model`s, this is a JSON object.
+ * For TensorFlow-style models (e.g., `SavedModel`), this is the JSON
+ * encoding of the `GraphDef` protocol buffer.
+ */
+ modelTopology: {};
+ /** Model training configuration.
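As an illustration of how `ModelArtifacts` maps onto `ModelArtifactsInfo`, a small hypothetical helper (not part of TFJS) that derives the byte-size fields from an artifacts object, assuming a JSON topology:

```ts
import * as tf from '@tensorflow/tfjs';

// Hypothetical helper: derive ModelArtifactsInfo-style fields from ModelArtifacts
function artifactsInfo(artifacts: tf.io.ModelArtifacts) {
  return {
    dateSaved: new Date(),
    modelTopologyType: 'JSON' as const, // assumption: topology is a JSON object, not a binary GraphDef
    modelTopologyBytes: artifacts.modelTopology ? JSON.stringify(artifacts.modelTopology).length : 0,
    weightSpecsBytes: artifacts.weightSpecs ? JSON.stringify(artifacts.weightSpecs).length : 0,
    weightDataBytes: artifacts.weightData ? artifacts.weightData.byteLength : 0,
  };
}
```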
*/
+ trainingConfig?: TrainingConfig;
+ /**
+ * Weights manifest.
+ *
+ * The weights manifest consists of an ordered list of weight-manifest
+ * groups. Each weight-manifest group consists of a number of weight values
+ * stored in a number of paths. See the documentation of
+ * `WeightsManifestConfig` for more details.
+ */
+ weightsManifest: WeightsManifestConfig;
+ /**
+ * Hard-coded format name for models saved from TensorFlow.js or converted
+ * by TensorFlow.js Converter.
+ */
+ format?: string;
+ /**
+ * What library is responsible for originally generating this artifact.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js v1.0.0'.
+ */
+ generatedBy?: string;
+ /**
+ * What library or tool is responsible for converting the original model
+ * to this format, applicable only if the model is output by a converter.
+ *
+ * Used for debugging purposes. E.g., 'TensorFlow.js Converter v1.0.0'.
+ *
+ * A value of `null` means the model artifacts are generated without any
+ * conversion process (e.g., saved directly from a TensorFlow.js
+ * `tf.LayersModel` instance.)
+ */
+ convertedBy?: string | null;
+ /**
+ * Inputs and outputs signature for saved model.
+ */
+ signature?: {};
+ /**
+ * User-defined metadata about the model.
+ */
+ userDefinedMetadata?: {
+ [key: string]: {};
+ };
+ /**
+ * Initializer for the model.
+ */
+ modelInitializer?: {};
+}
+
+declare interface ModelPredictConfig {
+ /**
+ * Optional. Batch size (Integer). If unspecified, it will default to 32.
+ */
+ batchSize?: number;
+ /**
+ * Optional. Verbosity mode. Defaults to false.
+ */
+ verbose?: boolean;
+}
+
+/** Instances of all possible TFJS Graph Models used by Human
+ * - loaded as needed based on configuration
+ * - initialized explicitly with `human.load()` method
+ * - initialized implicitly on first call to `human.detect()`
+ * - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise` if loading
+ */
+export declare class Models {
+ ssrnetage: null | GraphModel | Promise<GraphModel>;
+ gear: null | GraphModel | Promise<GraphModel>;
+ blazeposedetect: null | GraphModel | Promise<GraphModel>;
+ blazepose: null | GraphModel | Promise<GraphModel>;
+ centernet: null | GraphModel | Promise<GraphModel>;
+ efficientpose: null | GraphModel | Promise<GraphModel>;
+ mobilefacenet: null | GraphModel | Promise<GraphModel>;
+ emotion: null | GraphModel | Promise<GraphModel>;
+ facedetect: null | GraphModel | Promise<GraphModel>;
+ faceiris: null | GraphModel | Promise<GraphModel>;
+ facemesh: null | GraphModel | Promise<GraphModel>;
+ faceres: null | GraphModel | Promise<GraphModel>;
+ ssrnetgender: null | GraphModel | Promise<GraphModel>;
+ handpose: null | GraphModel | Promise<GraphModel>;
+ handskeleton: null | GraphModel | Promise<GraphModel>;
+ handtrack: null | GraphModel | Promise<GraphModel>;
+ liveness: null | GraphModel | Promise<GraphModel>;
+ movenet: null | GraphModel | Promise<GraphModel>;
+ nanodet: null | GraphModel | Promise<GraphModel>;
+ posenet: null | GraphModel | Promise<GraphModel>;
+ segmentation: null | GraphModel | Promise<GraphModel>;
+ antispoof: null | GraphModel | Promise<GraphModel>;
+}
+
+declare namespace models {
+ export {
+ reset,
+ load,
+ validate,
+ Models
+ }
+}
+
+/**
+ * An interface for the manager of a model store.
+ *
+ * A model store is defined as a storage medium on which multiple models can
+ * be stored. Each stored model has a unique `path` as its identifier.
+ * A `ModelStoreManager` for the store allows actions including
+ *
+ * - Listing the models stored in the store.
+ * - Deleting a model from the store.
+ */
+declare interface ModelStoreManager {
+ /**
+ * List all models in the model store.
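Since each field of `Models` starts out `null` and only becomes a `GraphModel` once loaded, a minimal sketch (assuming the `@vladmandic/human` package entry point) that preloads configured models and lists the ready ones:

```ts
import { Human } from '@vladmandic/human';

async function preload(): Promise<void> {
  const human = new Human();
  await human.load(); // explicit initialization; otherwise models load on the first detect()
  // Each slot is null (not loaded), a Promise (loading), or a GraphModel (ready)
  for (const [name, model] of Object.entries(human.models)) {
    if (model) console.log(`model ready: ${name}`);
  }
}
```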
+ * + * @returns A dictionary mapping paths of existing models to their + * model artifacts info. Model artifacts info include type of the model's + * topology, byte sizes of the topology, weights, etc. + */ + listModels(): Promise<{ + [path: string]: ModelArtifactsInfo; + }>; + /** + * Remove a model specified by `path`. + * + * @param path + * @returns ModelArtifactsInfo of the deleted model (if and only if deletion + * is successful). + * @throws Error if deletion fails, e.g., if no model exists at `path`. + */ + removeModel(path: string): Promise; +} + +/** + * Interface for model input/output tensor info. + */ +declare interface ModelTensorInfo { + name: string; + shape?: number[]; + dtype: DataType; + tfDtype?: string; +} + +/** + * Move a model from one URL to another. + * + * This function supports: + * + * 1. Moving within a storage medium, e.g., + * `tf.io.moveModel('localstorage://model-1', 'localstorage://model-2')` + * 2. Moving between two storage mediums, e.g., + * `tf.io.moveModel('localstorage://model-1', 'indexeddb://model-1')` + * + * ```js + * // First create and save a model. + * const model = tf.sequential(); + * model.add(tf.layers.dense( + * {units: 1, inputShape: [10], activation: 'sigmoid'})); + * await model.save('localstorage://demo/management/model1'); + * + * // Then list existing models. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Move the model, from Local Storage to IndexedDB. + * await tf.io.moveModel( + * 'localstorage://demo/management/model1', + * 'indexeddb://demo/management/model1'); + * + * // List models again. + * console.log(JSON.stringify(await tf.io.listModels())); + * + * // Remove the moved model. + * await tf.io.removeModel('indexeddb://demo/management/model1'); + * ``` + * + * @param sourceURL Source URL of moving. + * @param destURL Destination URL of moving. + * @returns ModelArtifactsInfo of the copied model (if and only if copying + * is successful). + * @throws Error if moving fails, e.g., if no model exists at `sourceURL`, or + * if `oldPath` and `newPath` are identical. 
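Returning to the `ModelStoreManager` interface above: to make its contract concrete, a toy in-memory implementation (purely illustrative, not a registered TFJS storage medium):

```ts
import * as tf from '@tensorflow/tfjs';

// Toy in-memory model store implementing the ModelStoreManager contract
class MemoryStoreManager {
  private store = new Map<string, tf.io.ModelArtifactsInfo>();

  async listModels(): Promise<{ [path: string]: tf.io.ModelArtifactsInfo }> {
    return Object.fromEntries(this.store);
  }

  async removeModel(path: string): Promise<tf.io.ModelArtifactsInfo> {
    const info = this.store.get(path);
    if (!info) throw new Error(`no model exists at ${path}`); // mirrors the contract above
    this.store.delete(path);
    return info; // info of the deleted model
  }
}
```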
+ * + * @doc { + * heading: 'Models', + * subheading: 'Management', + * namespace: 'io', + * ignoreCI: true + * } + */ +declare function moveModel(sourceURL: string, destURL: string): Promise; + +declare interface NamedTensor { + name: string; + tensor: Tensor; +} + +/** @docalias {[name: string]: Tensor} */ +declare type NamedTensorMap = { + [name: string]: Tensor; +}; + +declare type NamedTensorsMap = { + [key: string]: Tensor[]; +}; + +declare type NumericDataType = 'float32' | 'int32' | 'bool' | 'complex64'; + +/** draw detected objects */ +declare function object(inCanvas: AnyCanvas, result: Array, drawOptions?: Partial): Promise; + +/** Configures all object detection specific options */ +export declare interface ObjectConfig extends GenericConfig { + /** minimum confidence for a detected objects before results are discarded */ + minConfidence: number; + /** minimum overlap between two detected objects before one is discarded */ + iouThreshold: number; + /** maximum number of detected objects */ + maxDetected: number; +} + +/** Object results */ +export declare interface ObjectResult { + /** object id */ + id: number; + /** object detection score */ + score: number; + /** detected object class id */ + class: number; + /** detected object class name */ + label: ObjectType; + /** detected object box */ + box: Box; + /** detected object box normalized to 0..1 */ + boxRaw: Box; +} + +export declare type ObjectType = 'person' | 'bicycle' | 'car' | 'motorcycle' | 'airplane' | 'bus' | 'train' | 'truck' | 'boat' | 'traffic light' | 'fire hydrant' | 'stop sign' | 'parking meter' | 'bench' | 'bird' | 'cat' | 'dog' | 'horse' | 'sheep' | 'cow' | 'elephant' | 'bear' | 'zebra' | 'giraffe' | 'backpack' | 'umbrella' | 'handbag' | 'tie' | 'suitcase' | 'frisbee' | 'skis' | 'snowboard' | 'sports ball' | 'kite' | 'baseball bat' | 'baseball glove' | 'skateboard' | 'surfboard' | 'tennis racket' | 'bottle' | 'wine glass' | 'cup' | 'fork' | 'knife' | 'spoon' | 'bowl' | 'banana' | 'apple' | 'sandwich' | 'orange' | 'broccoli' | 'carrot' | 'hot dog' | 'pizza' | 'donut' | 'cake' | 'chair' | 'couch' | 'potted plant' | 'bed' | 'dining table' | 'toilet' | 'tv' | 'laptop' | 'mouse' | 'remote' | 'keyboard' | 'cell phone' | 'microwave' | 'oven' | 'toaster' | 'sink' | 'refrigerator' | 'book' | 'clock' | 'vase' | 'scissors' | 'teddy bear' | 'hair drier' | 'toothbrush'; + +/** + * Callback for the progress of a long-running action such as an HTTP + * request for a large binary object. + * + * `fraction` should be a number in the [0, 1] interval, indicating how + * much of the action has completed. 
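A hedged sketch consuming the `ObjectResult` fields declared above in this chunk, filtering by `score` and `label`; the input element and thresholds are assumptions, and object detection must be enabled in the configuration:

```ts
import { Human } from '@vladmandic/human';

const human = new Human(); // assumes object detection is enabled in config

async function findPeople(input: HTMLVideoElement) {
  const result = await human.detect(input);
  return result.object
    .filter((obj) => obj.label === 'person' && obj.score > 0.5) // keep confident person detections
    .map((obj) => obj.box); // box in input coordinates; boxRaw is the 0..1 normalized variant
}
```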
+ */
+declare type OnProgressCallback = (fraction: number) => void;
+
+/** currently set draw options {@link DrawOptions} */
+declare const options: DrawOptions;
+
+/** draw combined person results instead of individual detection result objects */
+declare function person(inCanvas: AnyCanvas, result: Array<PersonResult>, drawOptions?: Partial<DrawOptions>): Promise<void>;
+
+/** Person result
+ * - Accessed via the `persons` getter, which triggers combining all individual results into a virtual person object
+ */
+export declare interface PersonResult {
+ /** person id */
+ id: number;
+ /** face result that belongs to this person */
+ face: FaceResult;
+ /** body result that belongs to this person */
+ body: BodyResult | null;
+ /** left and right hand results that belong to this person */
+ hands: {
+ left: HandResult | null;
+ right: HandResult | null;
+ };
+ /** detected gestures specific to this person */
+ gestures: Array<GestureResult>;
+ /** box that defines the person */
+ box: Box;
+ /** box that defines the person normalized to 0..1 */
+ boxRaw?: Box;
+}
+
+/** generic point as [x, y, z?] */
+export declare type Point = [number, number, number?];
+
+export declare type Race = 'white' | 'black' | 'asian' | 'indian' | 'other';
+
+export declare enum Rank {
+ R0 = "R0",
+ R1 = "R1",
+ R2 = "R2",
+ R3 = "R3",
+ R4 = "R4",
+ R5 = "R5",
+ R6 = "R6"
+}
+
+declare interface RecursiveArray<T extends any> {
+ [index: number]: T | RecursiveArray<T>;
+}
+
+declare const registerLoadRouter: (loadRouter: IORouter) => void;
+
+declare const registerSaveRouter: (saveRouter: IORouter) => void;
+
+/**
+ * Remove a model specified by URL from a registered storage medium.
+ *
+ * ```js
+ * // First create and save a model.
+ * const model = tf.sequential();
+ * model.add(tf.layers.dense(
+ * {units: 1, inputShape: [10], activation: 'sigmoid'}));
+ * await model.save('localstorage://demo/management/model1');
+ *
+ * // Then list existing models.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ *
+ * // Delete the model.
+ * await tf.io.removeModel('localstorage://demo/management/model1');
+ *
+ * // List models again.
+ * console.log(JSON.stringify(await tf.io.listModels()));
+ * ```
+ *
+ * @param url A URL to a stored model, with a scheme prefix, e.g.,
+ * 'localstorage://my-model-1', 'indexeddb://my/model/2'.
+ * @returns ModelArtifactsInfo of the deleted model (if and only if deletion
+ * is successful).
+ * @throws Error if deletion fails, e.g., if no model exists at `url`.
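A hedged sketch reading the `persons` getter described above, which combines face, body, and hand results into `PersonResult` objects; the input element is an assumption:

```ts
import { Human } from '@vladmandic/human';

const human = new Human();

async function describePersons(input: HTMLImageElement): Promise<void> {
  const result = await human.detect(input);
  for (const person of result.persons) {
    // face is always present; body and individual hands may be null
    const hands = [person.hands.left, person.hands.right].filter((h) => h !== null).length;
    console.log(`person ${person.id}: face score ${person.face.score}, hands ${hands}, gestures ${person.gestures.length}`);
  }
}
```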
+ *
+ * @doc {
+ * heading: 'Models',
+ * subheading: 'Management',
+ * namespace: 'io',
+ * ignoreCI: true
+ * }
+ */
+declare function removeModel(url: string): Promise<ModelArtifactsInfo>;
+
+/**
+ * Additional options for Platform.fetch
+ */
+declare interface RequestDetails {
+ /**
+ * Is this request for a binary file (as opposed to a json file)
+ */
+ isBinary?: boolean;
+}
+
+declare function reset(instance: Human): void;
+
+/**
+ * Result interface definition for **Human** library
+ *
+ * Contains all possible detection results
+ */
+export declare interface Result {
+ /** {@link FaceResult}: detection & analysis results */
+ face: Array<FaceResult>;
+ /** {@link BodyResult}: detection & analysis results */
+ body: Array<BodyResult>;
+ /** {@link HandResult}: detection & analysis results */
+ hand: Array<HandResult>;
+ /** {@link GestureResult}: detection & analysis results */
+ gesture: Array<GestureResult>;
+ /** {@link ObjectResult}: detection & analysis results */
+ object: Array<ObjectResult>;
+ /** global performance object with timing values for each operation */
+ performance: Record<string, number>;
+ /** optional processed canvas that can be used to draw input on screen */
+ canvas?: AnyCanvas | null;
+ /** timestamp of detection representing the milliseconds elapsed since the UNIX epoch */
+ readonly timestamp: number;
+ /** getter property that returns unified persons object */
+ persons: Array<PersonResult>;
+ /** Last known error message */
+ error: string | null;
+}
+
+/**
+ * Options for saving a model.
+ * @innamespace io
+ */
+declare interface SaveConfig {
+ /**
+ * Whether to save only the trainable weights of the model, ignoring the
+ * non-trainable ones.
+ */
+ trainableOnly?: boolean;
+ /**
+ * Whether the optimizer will be saved (if it exists).
+ *
+ * Default: `false`.
+ */
+ includeOptimizer?: boolean;
+}
+
+/**
+ * Type definition for handlers of saving operations.
+ */
+declare type SaveHandler = (modelArtifact: ModelArtifacts) => Promise<SaveResult>;
+
+/**
+ * Result of a saving operation.
+ */
+declare interface SaveResult {
+ /**
+ * Information about the model artifacts saved.
+ */
+ modelArtifactsInfo: ModelArtifactsInfo;
+ /**
+ * HTTP responses from the server that handled the model-saving request (if
+ * any). This is applicable only to server-based saving routes.
+ */
+ responses?: Response[];
+ /**
+ * Error messages and related data (if any).
+ */
+ errors?: Array<{} | string>;
+}
+
+/** Configures the body segmentation module
+ * - removes the background from an input containing a person
+ * - if segmentation is enabled it will run as a preprocessing task before any other model
+ * - alternatively, leave it disabled and use it on-demand via the human.segmentation method, which can
+ * remove the background or replace it with a user-provided background
+ */
+export declare interface SegmentationConfig extends GenericConfig {
+ /** blur segmentation output by a given number of pixels for a more realistic image */
+ blur: number;
+}
+
+/**
+ * @license
+ * Copyright 2017 Google LLC. All Rights Reserved.
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
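To show how `SaveHandler`, `SaveResult`, and `ModelArtifactsInfo` fit together, a minimal sketch using `tf.io.withSaveHandler` (declared later in this file); the handler body is illustrative only:

```ts
import * as tf from '@tensorflow/tfjs';

// A SaveHandler: receives ModelArtifacts and returns a SaveResult
const handler = async (artifacts: tf.io.ModelArtifacts): Promise<tf.io.SaveResult> => ({
  modelArtifactsInfo: {
    dateSaved: new Date(),
    modelTopologyType: 'JSON',
    weightDataBytes: artifacts.weightData ? artifacts.weightData.byteLength : 0,
  },
});

async function saveExample(): Promise<void> {
  const model = tf.sequential();
  model.add(tf.layers.dense({ units: 1, inputShape: [10] }));
  await model.save(tf.io.withSaveHandler(handler));
}
```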
+ * =============================================================================
+ */
+///
+/** @docalias number[] */
+declare interface ShapeMap {
+ R0: number[];
+ R1: [number];
+ R2: [number, number];
+ R3: [number, number, number];
+ R4: [number, number, number, number];
+ R5: [number, number, number, number, number];
+ R6: [number, number, number, number, number, number];
+}
+
+/** Calculates normalized similarity between two face descriptors based on their `distance`
+ * @param options - calculation options
+ * - order - algorithm to use
+ * Euclidean distance if `order` is 2 (default), Minkowski distance algorithm of nth order if `order` is higher than 2
+ * - multiplier - by how much to enhance difference analysis, in range of 1..100
+ * default is 20, which normalizes results so that a similarity above 0.5 can be considered a match
+ * - min - normalize similarity result to a given range
+ * - max - normalize similarity result to a given range
+ * default is 0.2...0.8
+ * Returns similarity between two face descriptors normalized to 0..1 range where 0 is no similarity and 1 is perfect similarity
+ */
+declare function similarity(descriptor1: Descriptor, descriptor2: Descriptor, options?: MatchOptions): number;
+
+declare interface SingleValueMap {
+ bool: boolean;
+ int32: number;
+ float32: number;
+ complex64: number;
+ string: string;
+}
+
+export declare namespace Tensor { }
+
+/**
+ * A `tf.Tensor` object represents an immutable, multidimensional array of
+ * numbers that has a shape and a data type.
+ *
+ * For performance reasons, functions that create tensors do not necessarily
+ * perform a copy of the data passed to them (e.g. if the data is passed as a
+ * `Float32Array`), and changes to the data will change the tensor. This is not
+ * a feature and is not supported. To avoid this behavior, use the tensor before
+ * changing the input data or create a copy with `copy = tf.add(yourTensor, 0)`.
+ *
+ * See `tf.tensor` for details on how to create a `tf.Tensor`.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+export declare class Tensor<R extends Rank = Rank> {
+ /** Unique id of this tensor. */
+ readonly id: number;
+ /**
+ * Id of the bucket holding the data for this tensor. Multiple arrays can
+ * point to the same bucket (e.g. when calling array.reshape()).
+ */
+ dataId: DataId;
+ /** The shape of the tensor. */
+ readonly shape: ShapeMap[R];
+ /** Number of elements in the tensor. */
+ readonly size: number;
+ /** The data type for the array. */
+ readonly dtype: DataType;
+ /** The rank type for the array (see `Rank` enum). */
+ readonly rankType: R;
+ /** Whether this tensor has been globally kept. */
+ kept: boolean;
+ /** The id of the scope this tensor is being tracked in. */
+ scopeId: number;
+ /**
+ * Number of elements to skip in each dimension when indexing. See
+ * https://docs.scipy.org/doc/numpy/reference/generated/\
+ * numpy.ndarray.strides.html
+ */
+ readonly strides: number[];
+ constructor(shape: ShapeMap[R], dtype: DataType, dataId: DataId, id: number);
+ readonly rank: number;
+ /**
+ * Returns a promise of `tf.TensorBuffer` that holds the underlying data.
+ *
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ buffer(): Promise<TensorBuffer>;
+ /**
+ * Returns a `tf.TensorBuffer` that holds the underlying data.
+ * @doc {heading: 'Tensors', subheading: 'Classes'}
+ */
+ bufferSync(): TensorBuffer;
+ /**
+ * Returns the tensor data as a nested array. The transfer of data is done
+ * asynchronously.
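Returning to the `similarity` function declared earlier in this chunk: a hedged usage sketch (the package import path and descriptor sources are assumptions):

```ts
import { Human } from '@vladmandic/human';

const human = new Human();
declare const descriptor1: number[]; // hypothetical descriptor from an earlier detection
declare const descriptor2: number[]; // hypothetical descriptor from another detection

// order 2 = Euclidean distance; multiplier sharpens the difference curve;
// min/max renormalize the result within the 0..1 output range
const score = human.similarity(descriptor1, descriptor2, { order: 2, multiplier: 20, min: 0.2, max: 0.8 });
console.log(score > 0.5 ? 'likely the same person' : 'different person');
```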
+ * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + array(): Promise; + /** + * Returns the tensor data as a nested array. The transfer of data is done + * synchronously. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + arraySync(): ArrayMap[R]; + /** + * Asynchronously downloads the values from the `tf.Tensor`. Returns a + * promise of `TypedArray` that resolves when the computation has finished. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + data(): Promise; + /** + * Copy the tensor's data to a new GPU resource. Comparing to the `dataSync()` + * and `data()`, this method prevents data from being downloaded to CPU. + * + * For WebGL backend, the data will be stored on a densely packed texture. + * This means that the texture will use the RGBA channels to store value. + * + * @param options: + * For WebGL, + * - customTexShape: Optional. If set, will use the user defined + * texture shape to create the texture. + * + * @returns For WebGL backend, a GPUData contains the new texture and + * its information. + * { + * tensorRef: The tensor that is associated with this texture, + * texture: WebGLTexture, + * texShape: [number, number] // [height, width] + * } + * Remember to dispose the GPUData after it is used by + * `res.tensorRef.dispose()`. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dataToGPU(options?: DataToGPUOptions): GPUData; + /** + * Synchronously downloads the values from the `tf.Tensor`. This blocks the + * UI thread until the values are ready, which can cause performance issues. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dataSync(): DataTypeMap[D]; + /** Returns the underlying bytes of the tensor's data. */ + bytes(): Promise; + /** + * Disposes `tf.Tensor` from memory. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + dispose(): void; + protected isDisposedInternal: boolean; + readonly isDisposed: boolean; + throwIfDisposed(): void; + /** + * Prints the `tf.Tensor`. See `tf.print` for details. + * + * @param verbose Whether to print verbose information about the tensor, + * including dtype and size. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + print(verbose?: boolean): void; + /** + * Returns a copy of the tensor. See `tf.clone` for details. + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + clone(this: T): T; + /** + * Returns a human-readable description of the tensor. Useful for logging. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + toString(verbose?: boolean): string; + variable(trainable?: boolean, name?: string, dtype?: DataType): Variable; +} + +/** + * A mutable object, similar to `tf.Tensor`, that allows users to set values + * at locations before converting to an immutable `tf.Tensor`. + * + * See `tf.buffer` for creating a tensor buffer. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ +declare class TensorBuffer { + dtype: D; + size: number; + shape: ShapeMap[R]; + strides: number[]; + values: DataTypeMap[D]; + constructor(shape: ShapeMap[R], dtype: D, values?: DataTypeMap[D]); + /** + * Sets a value in the buffer at a given location. + * + * @param value The value to set. + * @param locs The location indices. + * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + set(value: SingleValueMap[D], ...locs: number[]): void; + /** + * Returns the value in the buffer at the provided location. + * + * @param locs The location indices. 
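A short sketch of the data-access methods declared on `Tensor` above (`data`, `dataSync`, `clone`, `dispose`):

```ts
import * as tf from '@tensorflow/tfjs';

async function tensorData(): Promise<void> {
  const t = tf.tensor2d([[1, 2], [3, 4]]);
  const copy = t.clone();           // new tensor handle over the same values
  const asyncVals = await t.data(); // non-blocking download, resolves to a TypedArray
  const syncVals = copy.dataSync(); // blocks until values are available
  console.log(asyncVals, syncVals, t.isDisposed);
  t.dispose();                      // release backing memory
  copy.dispose();
}
```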
+ * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + get(...locs: number[]): SingleValueMap[D]; + locToIndex(locs: number[]): number; + indexToLoc(index: number): number[]; + readonly rank: number; + /** + * Creates an immutable `tf.Tensor` object from the buffer. + * + * @doc {heading: 'Tensors', subheading: 'Creation'} + */ + toTensor(): Tensor; +} + +declare interface TensorInfo { + name: string; + shape?: number[]; + dtype: DataType; +} + +/** @docalias TypedArray|Array */ +export declare type TensorLike = TypedArray | number | boolean | string | RecursiveArray | RecursiveArray | RecursiveArray | Uint8Array[]; + +/** Model training configuration. */ +declare interface TrainingConfig { + /** Optimizer used for the model training. */ + optimizer_config: {}; + /** Loss function(s) for the model's output(s). */ + loss: string | string[] | { + [key: string]: string; + }; + /** Metric function(s) for the model's output(s). */ + metrics?: string[] | { + [key: string]: string; + }; + weighted_metrics?: string[]; + sample_weight_mode?: string; + loss_weights?: number[] | { + [key: string]: number; + }; +} + +declare type TypedArray = Float32Array | Int32Array | Uint8Array; + +declare function validate(instance: Human): Promise; + +/** + * A mutable `tf.Tensor`, useful for persisting state, e.g. for training. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ +declare class Variable extends Tensor { + trainable: boolean; + name: string; + constructor(initialValue: Tensor, trainable: boolean, name: string, tensorId: number); + /** + * Assign a new `tf.Tensor` to this variable. The new `tf.Tensor` must have + * the same shape and dtype as the old `tf.Tensor`. + * + * @param newValue New tensor to be assigned to this variable. + * + * @doc {heading: 'Tensors', subheading: 'Classes'} + */ + assign(newValue: Tensor): void; + dispose(): void; +} + +/** Possible values for `human.warmup` */ +export declare type WarmupType = ['' | 'none' | 'face' | 'full' | 'body']; + +/** + * Group to which the weight belongs. + * + * - 'optimizer': Weight from a stateful optimizer. + */ +declare type WeightGroup = 'model' | 'optimizer'; + +/** + * Creates a function, which reads a weights manifest JSON configuration, + * fetches the weight files using the specified function and returns them as + * `Tensor`s. + * + * ```js + * // example for creating a nodejs weight loader, which reads the weight files + * // from disk using fs.readFileSync + * + * import * as fs from 'fs' + * + * const fetchWeightsFromDisk = (filePaths: string[]) => + * filePaths.map(filePath => fs.readFileSync(filePath).buffer) + * + * const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk) + * + * const manifest = JSON.parse( + * fs.readFileSync('./my_model-weights_manifest').toString() + * ) + * const weightMap = await loadWeights(manifest, './') + * ``` + * @param fetchWeightsFunction The function used for fetching the weight files. + * @returns Weight loading function. + */ +declare function weightsLoaderFactory(fetchWeightsFunction: (fetchUrls: string[]) => Promise): (manifest: WeightsManifestConfig, filePathPrefix?: string, weightNames?: string[]) => Promise; + +/** + * A weight manifest. + * + * The weight manifest consists of an ordered list of weight-manifest groups. + * Each weight-manifest group ("group" for short hereafter) consists of a + * number of weight values stored in a number of paths. + * See the documentation of `WeightManifestGroupConfig` below for more details. 
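The mutable `TensorBuffer` pattern described above pairs with `tf.buffer`; a minimal sketch:

```ts
import * as tf from '@tensorflow/tfjs';

// Create a mutable buffer, set values at locations, then freeze it into a Tensor
const b = tf.buffer([2, 2], 'float32');
b.set(1, 0, 0); // value 1 at location [0, 0]
b.set(5, 1, 1); // value 5 at location [1, 1]
const t = b.toTensor();
t.print(); // [[1, 0], [0, 5]]
t.dispose();
```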
+ */ +declare type WeightsManifestConfig = WeightsManifestGroupConfig[]; + +/** + * An entry in the weight manifest. + * + * The entry contains specification of a weight. + */ +declare interface WeightsManifestEntry { + /** + * Name of the weight, e.g., 'Dense_1/bias' + */ + name: string; + /** + * Shape of the weight. + */ + shape: number[]; + /** + * Data type of the weight. + */ + dtype: 'float32' | 'int32' | 'bool' | 'string' | 'complex64'; + /** + * Type of the weight. + * + * Optional. + * + * The value 'optimizer' indicates the weight belongs to an optimizer + * (i.e., used only during model training and not during inference). + */ + group?: WeightGroup; + /** + * Information for dequantization of the weight. + */ + quantization?: { + scale?: number; + min?: number; + dtype: 'uint16' | 'uint8' | 'float16'; + }; +} + +/** + * A weight-manifest group. + * + * Consists of an ordered list of weight values encoded in binary format, + * stored in an ordered list of paths. + */ +declare interface WeightsManifestGroupConfig { + /** + * An ordered list of paths. + * + * Paths are intentionally abstract in order to be general. For example, they + * can be relative URL paths or relative paths on the file system. + */ + paths: string[]; + /** + * Specifications of the weights stored in the paths. + */ + weights: WeightsManifestEntry[]; +} + +/** + * Creates an IOHandler that passes saved model artifacts to a callback. + * + * ```js + * function handleSave(artifacts) { + * // ... do something with the artifacts ... + * return {modelArtifactsInfo: {...}, ...}; + * } + * + * const saveResult = model.save(tf.io.withSaveHandler(handleSave)); + * ``` + * + * @param saveHandler A function that accepts a `ModelArtifacts` and returns a + * `SaveResult`. + */ +declare function withSaveHandler(saveHandler: (artifacts: ModelArtifacts) => Promise): IOHandler; + +export { } diff --git a/dist/human.esm.js b/dist/human.esm.js new file mode 100644 index 00000000..b4ce3cc5 --- /dev/null +++ b/dist/human.esm.js @@ -0,0 +1,51497 @@ +/* + Human + homepage: + author: ' +*/ + +var __defProp = Object.defineProperty; +var __defNormalProp = (obj, key, value) => key in obj ? __defProp(obj, key, { enumerable: true, configurable: true, writable: true, value }) : obj[key] = value; +var __export = (target, all2) => { + for (var name in all2) + __defProp(target, name, { get: all2[name], enumerable: true }); +}; +var __publicField = (obj, key, value) => { + __defNormalProp(obj, typeof key !== "symbol" ? key + "" : key, value); + return value; +}; +var __accessCheck = (obj, member, msg) => { + if (!member.has(obj)) + throw TypeError("Cannot " + msg); +}; +var __privateGet = (obj, member, getter) => { + __accessCheck(obj, member, "read from private field"); + return getter ? getter.call(obj) : member.get(obj); +}; +var __privateAdd = (obj, member, value) => { + if (member.has(obj)) + throw TypeError("Cannot add the same private member more than once"); + member instanceof WeakSet ? member.add(obj) : member.set(obj, value); +}; +var __privateSet = (obj, member, value, setter) => { + __accessCheck(obj, member, "write to private field"); + setter ? 
setter.call(obj, value) : member.set(obj, value); + return value; +}; + +// src/util/util.ts +function log(...msg) { + const dt2 = new Date(); + const ts2 = `${dt2.getHours().toString().padStart(2, "0")}:${dt2.getMinutes().toString().padStart(2, "0")}:${dt2.getSeconds().toString().padStart(2, "0")}.${dt2.getMilliseconds().toString().padStart(3, "0")}`; + if (msg) + console.log(ts2, "Human:", ...msg); +} +function join(folder, file) { + const separator = folder.endsWith("/") ? "" : "/"; + const skipJoin = file.startsWith(".") || file.startsWith("/") || file.startsWith("http:") || file.startsWith("https:") || file.startsWith("file:"); + const path = skipJoin ? `${file}` : `${folder}${separator}${file}`; + if (!path.toLocaleLowerCase().includes(".json")) + throw new Error(`modelpath error: expecting json file: ${path}`); + return path; +} +var now = () => { + if (typeof performance !== "undefined") + return performance.now(); + return parseInt((Number(process.hrtime.bigint()) / 1e3 / 1e3).toString()); +}; +function validate(defaults, config3, parent = "config", msgs = []) { + for (const key of Object.keys(config3)) { + if (typeof config3[key] === "object") { + validate(defaults[key], config3[key], key, msgs); + } else { + const defined = defaults && typeof defaults[key] !== "undefined"; + if (!defined) + msgs.push({ reason: "unknown property", where: `${parent}.${key} = ${config3[key]}` }); + const same = defaults && typeof defaults[key] === typeof config3[key]; + if (defined && !same) + msgs.push({ reason: "property type mismatch", where: `${parent}.${key} = ${config3[key]}`, expected: typeof defaults[key] }); + } + } + if (config3.debug && parent === "config" && msgs.length > 0) + log("invalid configuration", msgs); + return msgs; +} +function mergeDeep(...objects) { + const isObject = (obj) => obj && typeof obj === "object"; + return objects.reduce((prev, obj) => { + Object.keys(obj || {}).forEach((key) => { + const pVal = prev[key]; + const oVal = obj[key]; + if (Array.isArray(pVal) && Array.isArray(oVal)) + prev[key] = pVal.concat(...oVal); + else if (isObject(pVal) && isObject(oVal)) + prev[key] = mergeDeep(pVal, oVal); + else + prev[key] = oVal; + }); + return prev; + }, {}); +} + +// src/config.ts +var config = { + backend: "", + modelBasePath: "", + cacheModels: true, + wasmPath: "", + debug: true, + async: true, + warmup: "full", + cacheSensitivity: 0.7, + skipAllowed: false, + deallocate: false, + filter: { + enabled: true, + equalization: false, + width: 0, + height: 0, + flip: false, + return: true, + brightness: 0, + contrast: 0, + sharpness: 0, + blur: 0, + saturation: 0, + hue: 0, + negative: false, + sepia: false, + vintage: false, + kodachrome: false, + technicolor: false, + polaroid: false, + pixelate: 0 + }, + gesture: { + enabled: true + }, + face: { + enabled: true, + detector: { + modelPath: "blazeface.json", + rotation: true, + maxDetected: 1, + skipFrames: 99, + skipTime: 2500, + minConfidence: 0.2, + iouThreshold: 0.1, + mask: false, + return: false + }, + mesh: { + enabled: true, + modelPath: "facemesh.json" + }, + iris: { + enabled: true, + modelPath: "iris.json" + }, + emotion: { + enabled: true, + minConfidence: 0.1, + skipFrames: 99, + skipTime: 1500, + modelPath: "emotion.json" + }, + description: { + enabled: true, + modelPath: "faceres.json", + skipFrames: 99, + skipTime: 3e3, + minConfidence: 0.1 + }, + antispoof: { + enabled: false, + skipFrames: 99, + skipTime: 4e3, + modelPath: "antispoof.json" + }, + liveness: { + enabled: false, + skipFrames: 99, + 
skipTime: 4e3, + modelPath: "liveness.json" + } + }, + body: { + enabled: true, + modelPath: "movenet-lightning.json", + maxDetected: -1, + minConfidence: 0.3, + skipFrames: 1, + skipTime: 200 + }, + hand: { + enabled: true, + rotation: true, + skipFrames: 99, + skipTime: 1e3, + minConfidence: 0.5, + iouThreshold: 0.2, + maxDetected: -1, + landmarks: true, + detector: { + modelPath: "handtrack.json" + }, + skeleton: { + modelPath: "handlandmark-full.json" + } + }, + object: { + enabled: false, + modelPath: "mb3-centernet.json", + minConfidence: 0.2, + iouThreshold: 0.4, + maxDetected: 10, + skipFrames: 99, + skipTime: 2e3 + }, + segmentation: { + enabled: false, + modelPath: "selfie.json", + blur: 8 + } +}; + +// dist/tfjs.esm.js +var tfjs_esm_exports = {}; +__export(tfjs_esm_exports, { + Abs: () => so, + Acos: () => nl, + Acosh: () => sl, + AdadeltaOptimizer: () => Jg, + AdagradOptimizer: () => eb, + AdamOptimizer: () => tb, + AdamaxOptimizer: () => nb, + Add: () => Cr, + AddN: () => wa, + All: () => rl, + Any: () => al, + ArgMax: () => ka, + ArgMin: () => il, + Asin: () => ol, + Asinh: () => ul, + Atan: () => ll, + Atan2: () => dl, + Atanh: () => cl, + AvgPool: () => Ia, + AvgPool3D: () => Md, + AvgPool3DGrad: () => qm, + AvgPoolGrad: () => Hm, + BackendWasm: () => Bce, + BatchMatMul: () => Sa, + BatchToSpaceND: () => ro, + Bincount: () => jm, + BroadcastArgs: () => Km, + BroadcastTo: () => t$, + Callback: () => X4, + CallbackList: () => oL, + Cast: () => Ca, + Ceil: () => Na, + ClipByValue: () => Nr, + Complex: () => Ld, + ComplexAbs: () => Bd, + Concat: () => ao, + Conv2D: () => Ta, + Conv2DBackpropFilter: () => Xm, + Conv2DBackpropInput: () => $a, + Conv3D: () => Vd, + Conv3DBackpropFilterV2: () => Ym, + Conv3DBackpropInputV2: () => Qm, + Cos: () => Aa, + Cosh: () => Ea, + CropAndResize: () => oo, + Cumsum: () => io, + CustomCallback: () => cL, + DataStorage: () => Od, + DenseBincount: () => Zm, + DepthToSpace: () => uo, + DepthwiseConv2dNative: () => _a, + DepthwiseConv2dNativeBackpropFilter: () => Jm, + DepthwiseConv2dNativeBackpropInput: () => eg, + Diag: () => tg, + Dilation2D: () => Wd, + Dilation2DBackpropFilter: () => zf, + Dilation2DBackpropInput: () => Pf, + ENV: () => Cw, + EarlyStopping: () => Y4, + Einsum: () => Ud, + Elu: () => Da, + EluGrad: () => ng, + Environment: () => XT, + Equal: () => lo, + Erf: () => pl, + Exp: () => Fa, + ExpandDims: () => co, + Expm1: () => po, + FFT: () => sg, + Fill: () => hl, + FlipLeftRight: () => ho, + Floor: () => Oa, + FloorDiv: () => Pa, + FromPixels: () => id, + FusedBatchNorm: () => za, + FusedConv2D: () => ra, + FusedDepthwiseConv2D: () => aa, + GPGPUContext: () => Ff, + GatherNd: () => mo, + GatherV2: () => fo, + GraphModel: () => tW, + Greater: () => go, + GreaterEqual: () => Ma, + History: () => lL, + IFFT: () => rg, + Identity: () => La, + Imag: () => Gd, + InputSpec: () => Rt, + IsFinite: () => fl, + IsInf: () => ml, + IsNan: () => gl, + KernelBackend: () => tl, + LRN: () => qd, + LRNGrad: () => ig, + LayerVariable: () => eL, + LayersModel: () => mr, + LeakyRelu: () => Ba, + Less: () => bo, + LessEqual: () => yo, + LinSpace: () => ag, + Log: () => Va, + Log1p: () => bl, + LogSoftmax: () => n$, + LogicalAnd: () => vo, + LogicalNot: () => yl, + LogicalOr: () => Hd, + MathBackendCPU: () => cS, + MathBackendWebGL: () => uC, + Max: () => Wa, + MaxPool: () => Ga, + MaxPool3D: () => jd, + MaxPool3DGrad: () => ug, + MaxPoolGrad: () => og, + MaxPoolWithArgmax: () => lg, + Maximum: () => Ua, + Mean: () => Ha, + Min: () => qa, + Minimum: () 
=> ja, + MirrorPad: () => Ka, + Mod: () => vl, + MomentumOptimizer: () => sb, + Multinomial: () => cg, + Multiply: () => Xa, + Neg: () => xo, + NonMaxSuppressionV3: () => ko, + NonMaxSuppressionV4: () => xl, + NonMaxSuppressionV5: () => Io, + NotEqual: () => wo, + OP_SCOPE_SUFFIX: () => F$, + OneHot: () => Co, + OnesLike: () => So, + Optimizer: () => _r, + OptimizerConstructors: () => Wr, + Pack: () => No, + PadV2: () => Ya, + Pool: () => qce, + Pow: () => Qa, + Prelu: () => Za, + Prod: () => To, + RMSPropOptimizer: () => rb, + RNN: () => Rr, + Range: () => wl, + Rank: () => C$, + Real: () => Kd, + RealDiv: () => Ra, + Reciprocal: () => kl, + Reduction: () => EF, + Relu: () => Ja, + Relu6: () => ti, + Reshape: () => $o, + ResizeBilinear: () => ei, + ResizeBilinearGrad: () => pg, + ResizeNearestNeighbor: () => Il, + ResizeNearestNeighborGrad: () => dg, + Reverse: () => Ao, + RotateWithOffset: () => Uo, + Round: () => Eo, + Rsqrt: () => ni, + SGDOptimizer: () => fp, + ScatterNd: () => _o, + Select: () => Ro, + Selu: () => Sl, + Sequential: () => $b, + Sigmoid: () => ri, + Sign: () => Cl, + Sin: () => si, + Sinh: () => Fo, + Slice: () => Do, + Softmax: () => oi, + Softplus: () => Nl, + SpaceToBatchND: () => Oo, + SparseFillEmptyRows: () => Xd, + SparseReshape: () => Tl, + SparseSegmentMean: () => Yd, + SparseSegmentSum: () => Qd, + SparseToDense: () => Zd, + SplitV: () => Po, + Sqrt: () => ai, + Square: () => $l, + SquaredDifference: () => ui, + Step: () => pi, + StridedSlice: () => zo, + StringNGrams: () => Jd, + StringSplit: () => hg, + StringToHashBucketFast: () => fg, + Sub: () => li, + Sum: () => ii, + SymbolicTensor: () => Ns, + Tan: () => Mo, + Tanh: () => ci, + Tensor: () => tt, + TensorBuffer: () => Vt, + Tile: () => Tr, + TopK: () => Lo, + Transform: () => Bo, + Transpose: () => di, + Unique: () => mg, + Unpack: () => Vo, + UnsortedSegmentSum: () => ep, + Variable: () => ud, + ZerosLike: () => Wo, + _FusedMatMul: () => sa, + abs: () => Mt, + acos: () => mE, + acosh: () => bE, + add: () => oe, + addN: () => vE, + all: () => yk, + any: () => Zf, + argMax: () => Gu, + argMin: () => SE, + asin: () => NE, + asinh: () => $E, + atan: () => EE, + atan2: () => RE, + atanh: () => FE, + avgPool: () => Ag, + avgPool3d: () => kk, + backend: () => uE, + backend_util: () => N, + basicLSTMCell: () => cde, + batchNorm: () => qu, + batchNorm2d: () => JE, + batchNorm3d: () => t_, + batchNorm4d: () => s_, + batchToSpaceND: () => Eg, + bincount: () => Ik, + booleanMaskAsync: () => Mde, + broadcastArgs: () => i_, + broadcastTo: () => Kc, + broadcast_util: () => Go, + browser: () => Zw, + buffer: () => _e, + callbacks: () => Kde, + cast: () => pe, + ceil: () => l_, + clipByValue: () => Vn, + clone: () => pr, + complex: () => ia, + concat: () => Dt, + concat1d: () => p_, + concat2d: () => f_, + concat3d: () => g_, + concat4d: () => y_, + constraints: () => uM, + conv1d: () => Sk, + conv2d: () => la, + conv2dTranspose: () => Ck, + conv3d: () => Nk, + conv3dTranspose: () => N_, + copyRegisteredKernels: () => Xce, + cos: () => Rg, + cosh: () => $k, + cosineWindow: () => Jk, + cumsum: () => Ak, + customGrad: () => qs, + data: () => nW, + denseBincount: () => __, + deprecationWarn: () => gk, + depthToSpace: () => D_, + depthwiseConv2d: () => rp, + deregisterOp: () => Yde, + device_util: () => Rl, + diag: () => dde, + dilation2d: () => z_, + disableDeprecationWarnings: () => Jce, + dispose: () => Ee, + disposeVariables: () => ede, + div: () => Se, + divNoNan: () => W_, + dot: () => pde, + dropout: () => C3, + 
einsum: () => H_, + elu: () => ap, + enableDebugMode: () => Zce, + enableProdMode: () => Qce, + enclosingPowerOfTwo: () => N3, + engine: () => ks, + env: () => X, + equal: () => Yn, + erf: () => K_, + exp: () => Qn, + expandDims: () => Pn, + expm1: () => Z_, + eye: () => Ek, + fft: () => Kg, + fill: () => Ol, + findBackend: () => ode, + findBackendFactory: () => ude, + floor: () => ip, + floorDiv: () => bk, + forceHalfFloat: () => hX, + fused: () => pa, + gather: () => ju, + gatherND: () => k3, + gather_util: () => ek, + getBackend: () => ade, + getGradient: () => Rv, + getKernel: () => Mf, + getKernelsForBackend: () => Lf, + getThreadsCount: () => ipe, + gpgpu_util: () => tK, + grad: () => mde, + grads: () => gde, + greater: () => Un, + greaterEqual: () => Ho, + ifft: () => md, + imag: () => Dg, + image: () => ls, + inTopKAsync: () => Bde, + initializers: () => gM, + input: () => aB, + io: () => An, + irfft: () => qk, + isFinite: () => hde, + isInf: () => fde, + isNaN: () => lR, + keep: () => jt, + kernel_impls: () => ys, + layers: () => ZM, + leakyRelu: () => Fg, + less: () => _k, + lessEqual: () => qo, + linalg: () => cO, + linspace: () => hR, + loadGraphModel: () => Qde, + loadLayersModel: () => qde, + localResponseNormalization: () => mR, + log: () => Zn, + log1p: () => Og, + logSigmoid: () => vde, + logSoftmax: () => Rk, + logSumExp: () => AR, + logicalAnd: () => _s, + logicalNot: () => Mg, + logicalOr: () => Pk, + logicalXor: () => xde, + losses: () => Ude, + matMul: () => Me, + math: () => _A, + max: () => $s, + maxPool: () => Lg, + maxPool3d: () => zk, + maxPoolWithArgmax: () => zR, + maximum: () => Er, + mean: () => St, + memory: () => Qf, + meshgrid: () => wde, + metrics: () => $4, + min: () => em, + minimum: () => up, + mirrorPad: () => UR, + mod: () => HR, + model: () => Gde, + models: () => G4, + moments: () => Bg, + movingAverage: () => Lde, + mul: () => V, + multiRNNCell: () => kde, + multinomial: () => YR, + neg: () => It, + nextFrame: () => pO, + norm: () => Qk, + notEqual: () => Ku, + oneHot: () => dd, + ones: () => Mn, + onesLike: () => Jn, + op: () => B, + outerProduct: () => Ide, + pad: () => hi, + pad1d: () => Sde, + pad2d: () => Cde, + pad3d: () => Nde, + pad4d: () => Tde, + pool: () => $de, + pow: () => da, + prelu: () => Wg, + print: () => fA, + prod: () => Mk, + profile: () => tde, + rand: () => Ade, + randomGamma: () => Ede, + randomNormal: () => bD, + randomUniform: () => zl, + range: () => Xu, + ready: () => rde, + real: () => hd, + reciprocal: () => wD, + registerBackend: () => sp, + registerCallbackConstructor: () => jde, + registerGradient: () => r$, + registerKernel: () => Al, + registerOp: () => Xde, + regularizers: () => H4, + relu: () => Ks, + relu6: () => Lk, + removeBackend: () => ide, + reshape: () => G, + reverse: () => es, + reverse1d: () => _de, + reverse2d: () => Rde, + reverse3d: () => Dde, + reverse4d: () => Fde, + rfft: () => Xg, + round: () => Bk, + rsqrt: () => Vk, + scalar: () => Ce, + scatterND: () => y3, + scatter_util: () => nk, + selu: () => Wk, + separableConv2d: () => DD, + sequential: () => Hde, + serialization: () => ae, + setBackend: () => sde, + setPlatform: () => lde, + setThreadsCount: () => ape, + setWasmPath: () => spe, + setWasmPaths: () => rpe, + setWebGLContext: () => qj, + setdiff1dAsync: () => OD, + shared: () => zy, + sigmoid: () => Hs, + sign: () => zD, + signal: () => Wde, + sin: () => Uk, + sinh: () => Gk, + slice: () => He, + slice1d: () => Hg, + slice2d: () => Hk, + slice3d: () => qg, + slice4d: () => fd, + 
slice_util: () => kt, + softmax: () => jg, + softplus: () => Pl, + spaceToBatchND: () => Vg, + sparse: () => Fc, + sparseToDense: () => Zk, + spectral: () => Vde, + split: () => Bn, + sqrt: () => rn, + square: () => ct, + squaredDifference: () => jk, + squeeze: () => yr, + stack: () => ts, + step: () => lp, + stridedSlice: () => n3, + string: () => Sf, + sub: () => be, + sum: () => ve, + sumOutType: () => np, + tan: () => r3, + tanh: () => Hu, + tensor: () => ds, + tensor1d: () => Jt, + tensor2d: () => Hi, + tensor3d: () => zA, + tensor4d: () => Ode, + tensor5d: () => Pde, + tensor6d: () => zde, + tensor_util: () => Ts, + test_util: () => eE, + tidy: () => H, + tile: () => us, + time: () => nde, + topk: () => i3, + train: () => _i, + transpose: () => qe, + truncatedNormal: () => Yg, + unique: () => Uv, + unregisterGradient: () => Kce, + unregisterKernel: () => jce, + unsortedSegmentSum: () => c3, + unstack: () => Rs, + upcastType: () => vn, + util: () => w, + valueAndGrad: () => bde, + valueAndGrads: () => yde, + variable: () => p3, + variableGrads: () => yR, + version: () => upe, + version_converter: () => Zde, + version_core: () => Yce, + version_cpu: () => Jde, + version_layers: () => DI, + version_wasm: () => ope, + version_webgl: () => epe, + webgl: () => tpe, + webgl_util: () => Hj, + webgpu: () => Cie, + where: () => xn, + whereAsync: () => Xk, + zeros: () => Tt, + zerosLike: () => je +}); +var cT = Object.create; +var Dd = Object.defineProperty; +var dT = Object.getOwnPropertyDescriptor; +var hw = Object.getOwnPropertyNames; +var pT = Object.getPrototypeOf; +var hT = Object.prototype.hasOwnProperty; +var fT = (e) => Dd(e, "__esModule", { value: true }); +var Pt = (e, t) => function() { + return t || (0, e[hw(e)[0]])((t = { exports: {} }).exports, t), t.exports; +}; +var Ae = (e, t) => { + for (var n in t) + Dd(e, n, { get: t[n], enumerable: true }); +}; +var mT = (e, t, n, s) => { + if (t && typeof t == "object" || typeof t == "function") + for (let r of hw(t)) + !hT.call(e, r) && (n || r !== "default") && Dd(e, r, { get: () => t[r], enumerable: !(s = dT(t, r)) || s.enumerable }); + return e; +}; +var va = (e, t) => mT(fT(Dd(e != null ? cT(pT(e)) : {}, "default", !t && e && e.__esModule ? 
{ get: () => e.default, enumerable: true } : { value: e, enumerable: true })), e); +var gT = Pt({ "src/node_modules/long/src/long.js"(e, t) { + t.exports = s; + var n = null; + try { + n = new WebAssembly.Instance(new WebAssembly.Module(new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0, 1, 13, 2, 96, 0, 1, 127, 96, 4, 127, 127, 127, 127, 1, 127, 3, 7, 6, 0, 1, 1, 1, 1, 1, 6, 6, 1, 127, 1, 65, 0, 11, 7, 50, 6, 3, 109, 117, 108, 0, 1, 5, 100, 105, 118, 95, 115, 0, 2, 5, 100, 105, 118, 95, 117, 0, 3, 5, 114, 101, 109, 95, 115, 0, 4, 5, 114, 101, 109, 95, 117, 0, 5, 8, 103, 101, 116, 95, 104, 105, 103, 104, 0, 0, 10, 191, 1, 6, 4, 0, 35, 0, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 126, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 127, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 128, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 129, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11, 36, 1, 1, 126, 32, 0, 173, 32, 1, 173, 66, 32, 134, 132, 32, 2, 173, 32, 3, 173, 66, 32, 134, 132, 130, 34, 4, 66, 32, 135, 167, 36, 0, 32, 4, 167, 11])), {}).exports; + } catch (F) { + } + function s(F, $, z) { + this.low = F | 0, this.high = $ | 0, this.unsigned = !!z; + } + s.prototype.__isLong__, Object.defineProperty(s.prototype, "__isLong__", { value: true }); + function r(F) { + return (F && F.__isLong__) === true; + } + s.isLong = r; + var a = {}, i = {}; + function o(F, $) { + var z, W, q; + return $ ? (F >>>= 0, (q = 0 <= F && F < 256) && (W = i[F], W) ? W : (z = c(F, (F | 0) < 0 ? -1 : 0, true), q && (i[F] = z), z)) : (F |= 0, (q = -128 <= F && F < 128) && (W = a[F], W) ? W : (z = c(F, F < 0 ? -1 : 0, false), q && (a[F] = z), z)); + } + s.fromInt = o; + function u(F, $) { + if (isNaN(F)) + return $ ? x : v; + if ($) { + if (F < 0) + return x; + if (F >= g) + return _; + } else { + if (F <= -b) + return P; + if (F + 1 >= b) + return R; + } + return F < 0 ? u(-F, $).neg() : c(F % m | 0, F / m | 0, $); + } + s.fromNumber = u; + function c(F, $, z) { + return new s(F, $, z); + } + s.fromBits = c; + var l = Math.pow; + function d(F, $, z) { + if (F.length === 0) + throw Error("empty string"); + if (F === "NaN" || F === "Infinity" || F === "+Infinity" || F === "-Infinity") + return v; + if (typeof $ == "number" ? (z = $, $ = false) : $ = !!$, z = z || 10, z < 2 || 36 < z) + throw RangeError("radix"); + var W; + if ((W = F.indexOf("-")) > 0) + throw Error("interior hyphen"); + if (W === 0) + return d(F.substring(1), $, z).neg(); + for (var q = u(l(z, 8)), K = v, j = 0; j < F.length; j += 8) { + var Q = Math.min(8, F.length - j), J = parseInt(F.substring(j, j + Q), z); + if (Q < 8) { + var ne = u(l(z, Q)); + K = K.mul(ne).add(u(J)); + } else + K = K.mul(q), K = K.add(u(J)); + } + return K.unsigned = $, K; + } + s.fromString = d; + function p(F, $) { + return typeof F == "number" ? u(F, $) : typeof F == "string" ? d(F, $) : c(F.low, F.high, typeof $ == "boolean" ? 
$ : F.unsigned); + } + s.fromValue = p; + var h = 1 << 16, f = 1 << 24, m = h * h, g = m * m, b = g / 2, y = o(f), v = o(0); + s.ZERO = v; + var x = o(0, true); + s.UZERO = x; + var k = o(1); + s.ONE = k; + var T = o(1, true); + s.UONE = T; + var C = o(-1); + s.NEG_ONE = C; + var R = c(-1, 2147483647, false); + s.MAX_VALUE = R; + var _ = c(-1, -1, true); + s.MAX_UNSIGNED_VALUE = _; + var P = c(0, -2147483648, false); + s.MIN_VALUE = P; + var D = s.prototype; + D.toInt = function() { + return this.unsigned ? this.low >>> 0 : this.low; + }, D.toNumber = function() { + return this.unsigned ? (this.high >>> 0) * m + (this.low >>> 0) : this.high * m + (this.low >>> 0); + }, D.toString = function($) { + if ($ = $ || 10, $ < 2 || 36 < $) + throw RangeError("radix"); + if (this.isZero()) + return "0"; + if (this.isNegative()) + if (this.eq(P)) { + var z = u($), W = this.div(z), q = W.mul(z).sub(this); + return W.toString($) + q.toInt().toString($); + } else + return "-" + this.neg().toString($); + for (var K = u(l($, 6), this.unsigned), j = this, Q = ""; ; ) { + var J = j.div(K), ne = j.sub(J.mul(K)).toInt() >>> 0, se = ne.toString($); + if (j = J, j.isZero()) + return se + Q; + for (; se.length < 6; ) + se = "0" + se; + Q = "" + se + Q; + } + }, D.getHighBits = function() { + return this.high; + }, D.getHighBitsUnsigned = function() { + return this.high >>> 0; + }, D.getLowBits = function() { + return this.low; + }, D.getLowBitsUnsigned = function() { + return this.low >>> 0; + }, D.getNumBitsAbs = function() { + if (this.isNegative()) + return this.eq(P) ? 64 : this.neg().getNumBitsAbs(); + for (var $ = this.high != 0 ? this.high : this.low, z = 31; z > 0 && ($ & 1 << z) == 0; z--) + ; + return this.high != 0 ? z + 33 : z + 1; + }, D.isZero = function() { + return this.high === 0 && this.low === 0; + }, D.eqz = D.isZero, D.isNegative = function() { + return !this.unsigned && this.high < 0; + }, D.isPositive = function() { + return this.unsigned || this.high >= 0; + }, D.isOdd = function() { + return (this.low & 1) === 1; + }, D.isEven = function() { + return (this.low & 1) === 0; + }, D.equals = function($) { + return r($) || ($ = p($)), this.unsigned !== $.unsigned && this.high >>> 31 === 1 && $.high >>> 31 === 1 ? false : this.high === $.high && this.low === $.low; + }, D.eq = D.equals, D.notEquals = function($) { + return !this.eq($); + }, D.neq = D.notEquals, D.ne = D.notEquals, D.lessThan = function($) { + return this.comp($) < 0; + }, D.lt = D.lessThan, D.lessThanOrEqual = function($) { + return this.comp($) <= 0; + }, D.lte = D.lessThanOrEqual, D.le = D.lessThanOrEqual, D.greaterThan = function($) { + return this.comp($) > 0; + }, D.gt = D.greaterThan, D.greaterThanOrEqual = function($) { + return this.comp($) >= 0; + }, D.gte = D.greaterThanOrEqual, D.ge = D.greaterThanOrEqual, D.compare = function($) { + if (r($) || ($ = p($)), this.eq($)) + return 0; + var z = this.isNegative(), W = $.isNegative(); + return z && !W ? -1 : !z && W ? 1 : this.unsigned ? $.high >>> 0 > this.high >>> 0 || $.high === this.high && $.low >>> 0 > this.low >>> 0 ? -1 : 1 : this.sub($).isNegative() ? -1 : 1; + }, D.comp = D.compare, D.negate = function() { + return !this.unsigned && this.eq(P) ? 
P : this.not().add(k); + }, D.neg = D.negate, D.add = function($) { + r($) || ($ = p($)); + var z = this.high >>> 16, W = this.high & 65535, q = this.low >>> 16, K = this.low & 65535, j = $.high >>> 16, Q = $.high & 65535, J = $.low >>> 16, ne = $.low & 65535, se = 0, te = 0, ie = 0, ee = 0; + return ee += K + ne, ie += ee >>> 16, ee &= 65535, ie += q + J, te += ie >>> 16, ie &= 65535, te += W + Q, se += te >>> 16, te &= 65535, se += z + j, se &= 65535, c(ie << 16 | ee, se << 16 | te, this.unsigned); + }, D.subtract = function($) { + return r($) || ($ = p($)), this.add($.neg()); + }, D.sub = D.subtract, D.multiply = function($) { + if (this.isZero()) + return v; + if (r($) || ($ = p($)), n) { + var z = n.mul(this.low, this.high, $.low, $.high); + return c(z, n.get_high(), this.unsigned); + } + if ($.isZero()) + return v; + if (this.eq(P)) + return $.isOdd() ? P : v; + if ($.eq(P)) + return this.isOdd() ? P : v; + if (this.isNegative()) + return $.isNegative() ? this.neg().mul($.neg()) : this.neg().mul($).neg(); + if ($.isNegative()) + return this.mul($.neg()).neg(); + if (this.lt(y) && $.lt(y)) + return u(this.toNumber() * $.toNumber(), this.unsigned); + var W = this.high >>> 16, q = this.high & 65535, K = this.low >>> 16, j = this.low & 65535, Q = $.high >>> 16, J = $.high & 65535, ne = $.low >>> 16, se = $.low & 65535, te = 0, ie = 0, ee = 0, ce = 0; + return ce += j * se, ee += ce >>> 16, ce &= 65535, ee += K * se, ie += ee >>> 16, ee &= 65535, ee += j * ne, ie += ee >>> 16, ee &= 65535, ie += q * se, te += ie >>> 16, ie &= 65535, ie += K * ne, te += ie >>> 16, ie &= 65535, ie += j * J, te += ie >>> 16, ie &= 65535, te += W * se + q * ne + K * J + j * Q, te &= 65535, c(ee << 16 | ce, te << 16 | ie, this.unsigned); + }, D.mul = D.multiply, D.divide = function($) { + if (r($) || ($ = p($)), $.isZero()) + throw Error("division by zero"); + if (n) { + if (!this.unsigned && this.high === -2147483648 && $.low === -1 && $.high === -1) + return this; + var z = (this.unsigned ? n.div_u : n.div_s)(this.low, this.high, $.low, $.high); + return c(z, n.get_high(), this.unsigned); + } + if (this.isZero()) + return this.unsigned ? x : v; + var W, q, K; + if (this.unsigned) { + if ($.unsigned || ($ = $.toUnsigned()), $.gt(this)) + return x; + if ($.gt(this.shru(1))) + return T; + K = x; + } else { + if (this.eq(P)) { + if ($.eq(k) || $.eq(C)) + return P; + if ($.eq(P)) + return k; + var j = this.shr(1); + return W = j.div($).shl(1), W.eq(v) ? $.isNegative() ? k : C : (q = this.sub($.mul(W)), K = W.add(q.div($)), K); + } else if ($.eq(P)) + return this.unsigned ? x : v; + if (this.isNegative()) + return $.isNegative() ? this.neg().div($.neg()) : this.neg().div($).neg(); + if ($.isNegative()) + return this.div($.neg()).neg(); + K = v; + } + for (q = this; q.gte($); ) { + W = Math.max(1, Math.floor(q.toNumber() / $.toNumber())); + for (var Q = Math.ceil(Math.log(W) / Math.LN2), J = Q <= 48 ? 1 : l(2, Q - 48), ne = u(W), se = ne.mul($); se.isNegative() || se.gt(q); ) + W -= J, ne = u(W, this.unsigned), se = ne.mul($); + ne.isZero() && (ne = k), K = K.add(ne), q = q.sub(se); + } + return K; + }, D.div = D.divide, D.modulo = function($) { + if (r($) || ($ = p($)), n) { + var z = (this.unsigned ? 
n.rem_u : n.rem_s)(this.low, this.high, $.low, $.high); + return c(z, n.get_high(), this.unsigned); + } + return this.sub(this.div($).mul($)); + }, D.mod = D.modulo, D.rem = D.modulo, D.not = function() { + return c(~this.low, ~this.high, this.unsigned); + }, D.and = function($) { + return r($) || ($ = p($)), c(this.low & $.low, this.high & $.high, this.unsigned); + }, D.or = function($) { + return r($) || ($ = p($)), c(this.low | $.low, this.high | $.high, this.unsigned); + }, D.xor = function($) { + return r($) || ($ = p($)), c(this.low ^ $.low, this.high ^ $.high, this.unsigned); + }, D.shiftLeft = function($) { + return r($) && ($ = $.toInt()), ($ &= 63) === 0 ? this : $ < 32 ? c(this.low << $, this.high << $ | this.low >>> 32 - $, this.unsigned) : c(0, this.low << $ - 32, this.unsigned); + }, D.shl = D.shiftLeft, D.shiftRight = function($) { + return r($) && ($ = $.toInt()), ($ &= 63) === 0 ? this : $ < 32 ? c(this.low >>> $ | this.high << 32 - $, this.high >> $, this.unsigned) : c(this.high >> $ - 32, this.high >= 0 ? 0 : -1, this.unsigned); + }, D.shr = D.shiftRight, D.shiftRightUnsigned = function($) { + if (r($) && ($ = $.toInt()), $ &= 63, $ === 0) + return this; + var z = this.high; + if ($ < 32) { + var W = this.low; + return c(W >>> $ | z << 32 - $, z >>> $, this.unsigned); + } else + return $ === 32 ? c(z, 0, this.unsigned) : c(z >>> $ - 32, 0, this.unsigned); + }, D.shru = D.shiftRightUnsigned, D.shr_u = D.shiftRightUnsigned, D.toSigned = function() { + return this.unsigned ? c(this.low, this.high, false) : this; + }, D.toUnsigned = function() { + return this.unsigned ? this : c(this.low, this.high, true); + }, D.toBytes = function($) { + return $ ? this.toBytesLE() : this.toBytesBE(); + }, D.toBytesLE = function() { + var $ = this.high, z = this.low; + return [z & 255, z >>> 8 & 255, z >>> 16 & 255, z >>> 24, $ & 255, $ >>> 8 & 255, $ >>> 16 & 255, $ >>> 24]; + }, D.toBytesBE = function() { + var $ = this.high, z = this.low; + return [$ >>> 24, $ >>> 16 & 255, $ >>> 8 & 255, $ & 255, z >>> 24, z >>> 16 & 255, z >>> 8 & 255, z & 255]; + }, s.fromBytes = function($, z, W) { + return W ? 
s.fromBytesLE($, z) : s.fromBytesBE($, z); + }, s.fromBytesLE = function($, z) { + return new s($[0] | $[1] << 8 | $[2] << 16 | $[3] << 24, $[4] | $[5] << 8 | $[6] << 16 | $[7] << 24, z); + }, s.fromBytesBE = function($, z) { + return new s($[4] << 24 | $[5] << 16 | $[6] << 8 | $[7], $[0] << 24 | $[1] << 16 | $[2] << 8 | $[3], z); + }; +} }); +var bT = Pt({ "(disabled):src/node_modules/node-fetch/browser.js"() { +} }); +var yT = Pt({ "(disabled):util"() { +} }); +var vT = Pt({ "src/node_modules/seedrandom/lib/alea.js"(e, t) { + (function(n, s, r) { + function a(c) { + var l = this, d = u(); + l.next = function() { + var p = 2091639 * l.s0 + l.c * 23283064365386963e-26; + return l.s0 = l.s1, l.s1 = l.s2, l.s2 = p - (l.c = p | 0); + }, l.c = 1, l.s0 = d(" "), l.s1 = d(" "), l.s2 = d(" "), l.s0 -= d(c), l.s0 < 0 && (l.s0 += 1), l.s1 -= d(c), l.s1 < 0 && (l.s1 += 1), l.s2 -= d(c), l.s2 < 0 && (l.s2 += 1), d = null; + } + function i(c, l) { + return l.c = c.c, l.s0 = c.s0, l.s1 = c.s1, l.s2 = c.s2, l; + } + function o(c, l) { + var d = new a(c), p = l && l.state, h = d.next; + return h.int32 = function() { + return d.next() * 4294967296 | 0; + }, h.double = function() { + return h() + (h() * 2097152 | 0) * 11102230246251565e-32; + }, h.quick = h, p && (typeof p == "object" && i(p, d), h.state = function() { + return i(d, {}); + }), h; + } + function u() { + var c = 4022871197, l = function(d) { + d = String(d); + for (var p = 0; p < d.length; p++) { + c += d.charCodeAt(p); + var h = 0.02519603282416938 * c; + c = h >>> 0, h -= c, h *= c, c = h >>> 0, h -= c, c += h * 4294967296; + } + return (c >>> 0) * 23283064365386963e-26; + }; + return l; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.alea = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var xT = Pt({ "src/node_modules/seedrandom/lib/xor128.js"(e, t) { + (function(n, s, r) { + function a(u) { + var c = this, l = ""; + c.x = 0, c.y = 0, c.z = 0, c.w = 0, c.next = function() { + var p = c.x ^ c.x << 11; + return c.x = c.y, c.y = c.z, c.z = c.w, c.w ^= c.w >>> 19 ^ p ^ p >>> 8; + }, u === (u | 0) ? c.x = u : l += u; + for (var d = 0; d < l.length + 64; d++) + c.x ^= l.charCodeAt(d) | 0, c.next(); + } + function i(u, c) { + return c.x = u.x, c.y = u.y, c.z = u.z, c.w = u.w, c; + } + function o(u, c) { + var l = new a(u), d = c && c.state, p = function() { + return (l.next() >>> 0) / 4294967296; + }; + return p.double = function() { + do + var h = l.next() >>> 11, f = (l.next() >>> 0) / 4294967296, m = (h + f) / (1 << 21); + while (m === 0); + return m; + }, p.int32 = l.next, p.quick = p, d && (typeof d == "object" && i(d, l), p.state = function() { + return i(l, {}); + }), p; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.xor128 = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var wT = Pt({ "src/node_modules/seedrandom/lib/xorwow.js"(e, t) { + (function(n, s, r) { + function a(u) { + var c = this, l = ""; + c.next = function() { + var p = c.x ^ c.x >>> 2; + return c.x = c.y, c.y = c.z, c.z = c.w, c.w = c.v, (c.d = c.d + 362437 | 0) + (c.v = c.v ^ c.v << 4 ^ (p ^ p << 1)) | 0; + }, c.x = 0, c.y = 0, c.z = 0, c.w = 0, c.v = 0, u === (u | 0) ? 
c.x = u : l += u; + for (var d = 0; d < l.length + 64; d++) + c.x ^= l.charCodeAt(d) | 0, d == l.length && (c.d = c.x << 10 ^ c.x >>> 4), c.next(); + } + function i(u, c) { + return c.x = u.x, c.y = u.y, c.z = u.z, c.w = u.w, c.v = u.v, c.d = u.d, c; + } + function o(u, c) { + var l = new a(u), d = c && c.state, p = function() { + return (l.next() >>> 0) / 4294967296; + }; + return p.double = function() { + do + var h = l.next() >>> 11, f = (l.next() >>> 0) / 4294967296, m = (h + f) / (1 << 21); + while (m === 0); + return m; + }, p.int32 = l.next, p.quick = p, d && (typeof d == "object" && i(d, l), p.state = function() { + return i(l, {}); + }), p; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.xorwow = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var kT = Pt({ "src/node_modules/seedrandom/lib/xorshift7.js"(e, t) { + (function(n, s, r) { + function a(u) { + var c = this; + c.next = function() { + var d = c.x, p = c.i, h, f, m; + return h = d[p], h ^= h >>> 7, f = h ^ h << 24, h = d[p + 1 & 7], f ^= h ^ h >>> 10, h = d[p + 3 & 7], f ^= h ^ h >>> 3, h = d[p + 4 & 7], f ^= h ^ h << 7, h = d[p + 7 & 7], h = h ^ h << 13, f ^= h ^ h << 9, d[p] = f, c.i = p + 1 & 7, f; + }; + function l(d, p) { + var h, f, m = []; + if (p === (p | 0)) + f = m[0] = p; + else + for (p = "" + p, h = 0; h < p.length; ++h) + m[h & 7] = m[h & 7] << 15 ^ p.charCodeAt(h) + m[h + 1 & 7] << 13; + for (; m.length < 8; ) + m.push(0); + for (h = 0; h < 8 && m[h] === 0; ++h) + ; + for (h == 8 ? f = m[7] = -1 : f = m[h], d.x = m, d.i = 0, h = 256; h > 0; --h) + d.next(); + } + l(c, u); + } + function i(u, c) { + return c.x = u.x.slice(), c.i = u.i, c; + } + function o(u, c) { + u == null && (u = +new Date()); + var l = new a(u), d = c && c.state, p = function() { + return (l.next() >>> 0) / 4294967296; + }; + return p.double = function() { + do + var h = l.next() >>> 11, f = (l.next() >>> 0) / 4294967296, m = (h + f) / (1 << 21); + while (m === 0); + return m; + }, p.int32 = l.next, p.quick = p, d && (d.x && i(d, l), p.state = function() { + return i(l, {}); + }), p; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.xorshift7 = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var IT = Pt({ "src/node_modules/seedrandom/lib/xor4096.js"(e, t) { + (function(n, s, r) { + function a(u) { + var c = this; + c.next = function() { + var d = c.w, p = c.X, h = c.i, f, m; + return c.w = d = d + 1640531527 | 0, m = p[h + 34 & 127], f = p[h = h + 1 & 127], m ^= m << 13, f ^= f << 17, m ^= m >>> 15, f ^= f >>> 12, m = p[h] = m ^ f, c.i = h, m + (d ^ d >>> 16) | 0; + }; + function l(d, p) { + var h, f, m, g, b, y = [], v = 128; + for (p === (p | 0) ? (f = p, p = null) : (p = p + "\0", f = 0, v = Math.max(v, p.length)), m = 0, g = -32; g < v; ++g) + p && (f ^= p.charCodeAt((g + 32) % p.length)), g === 0 && (b = f), f ^= f << 10, f ^= f >>> 15, f ^= f << 4, f ^= f >>> 13, g >= 0 && (b = b + 1640531527 | 0, h = y[g & 127] ^= f + b, m = h == 0 ? 
m + 1 : 0); + for (m >= 128 && (y[(p && p.length || 0) & 127] = -1), m = 127, g = 4 * 128; g > 0; --g) + f = y[m + 34 & 127], h = y[m = m + 1 & 127], f ^= f << 13, h ^= h << 17, f ^= f >>> 15, h ^= h >>> 12, y[m] = f ^ h; + d.w = b, d.X = y, d.i = m; + } + l(c, u); + } + function i(u, c) { + return c.i = u.i, c.w = u.w, c.X = u.X.slice(), c; + } + function o(u, c) { + u == null && (u = +new Date()); + var l = new a(u), d = c && c.state, p = function() { + return (l.next() >>> 0) / 4294967296; + }; + return p.double = function() { + do + var h = l.next() >>> 11, f = (l.next() >>> 0) / 4294967296, m = (h + f) / (1 << 21); + while (m === 0); + return m; + }, p.int32 = l.next, p.quick = p, d && (d.X && i(d, l), p.state = function() { + return i(l, {}); + }), p; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.xor4096 = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var ST = Pt({ "src/node_modules/seedrandom/lib/tychei.js"(e, t) { + (function(n, s, r) { + function a(u) { + var c = this, l = ""; + c.next = function() { + var p = c.b, h = c.c, f = c.d, m = c.a; + return p = p << 25 ^ p >>> 7 ^ h, h = h - f | 0, f = f << 24 ^ f >>> 8 ^ m, m = m - p | 0, c.b = p = p << 20 ^ p >>> 12 ^ h, c.c = h = h - f | 0, c.d = f << 16 ^ h >>> 16 ^ m, c.a = m - p | 0; + }, c.a = 0, c.b = 0, c.c = -1640531527, c.d = 1367130551, u === Math.floor(u) ? (c.a = u / 4294967296 | 0, c.b = u | 0) : l += u; + for (var d = 0; d < l.length + 20; d++) + c.b ^= l.charCodeAt(d) | 0, c.next(); + } + function i(u, c) { + return c.a = u.a, c.b = u.b, c.c = u.c, c.d = u.d, c; + } + function o(u, c) { + var l = new a(u), d = c && c.state, p = function() { + return (l.next() >>> 0) / 4294967296; + }; + return p.double = function() { + do + var h = l.next() >>> 11, f = (l.next() >>> 0) / 4294967296, m = (h + f) / (1 << 21); + while (m === 0); + return m; + }, p.int32 = l.next, p.quick = p, d && (typeof d == "object" && i(d, l), p.state = function() { + return i(l, {}); + }), p; + } + s && s.exports ? s.exports = o : r && r.amd ? r(function() { + return o; + }) : this.tychei = o; + })(e, typeof t == "object" && t, typeof define == "function" && define); +} }); +var CT = Pt({ "(disabled):crypto"() { +} }); +var NT = Pt({ "src/node_modules/seedrandom/seedrandom.js"(e, t) { + (function(n, s, r) { + var a = 256, i = 6, o = 52, u = "random", c = r.pow(a, i), l = r.pow(2, o), d = l * 2, p = a - 1, h; + function f(k, T, C) { + var R = []; + T = T == true ? { entropy: true } : T || {}; + var _ = y(b(T.entropy ? [k, x(s)] : k == null ? v() : k, 3), R), P = new m(R), D = function() { + for (var F = P.g(i), $ = c, z = 0; F < l; ) + F = (F + z) * a, $ *= a, z = P.g(1); + for (; F >= d; ) + F /= 2, $ /= 2, z >>>= 1; + return (F + z) / $; + }; + return D.int32 = function() { + return P.g(4) | 0; + }, D.quick = function() { + return P.g(4) / 4294967296; + }, D.double = D, y(x(P.S), s), (T.pass || C || function(F, $, z, W) { + return W && (W.S && g(W, P), F.state = function() { + return g(P, {}); + }), z ? (r[u] = F, $) : F; + })(D, _, "global" in T ? 
T.global : this == r, T.state); + } + function m(k) { + var T, C = k.length, R = this, _ = 0, P = R.i = R.j = 0, D = R.S = []; + for (C || (k = [C++]); _ < a; ) + D[_] = _++; + for (_ = 0; _ < a; _++) + D[_] = D[P = p & P + k[_ % C] + (T = D[_])], D[P] = T; + (R.g = function(F) { + for (var $, z = 0, W = R.i, q = R.j, K = R.S; F--; ) + $ = K[W = p & W + 1], z = z * a + K[p & (K[W] = K[q = p & q + $]) + (K[q] = $)]; + return R.i = W, R.j = q, z; + })(a); + } + function g(k, T) { + return T.i = k.i, T.j = k.j, T.S = k.S.slice(), T; + } + function b(k, T) { + var C = [], R = typeof k, _; + if (T && R == "object") + for (_ in k) + try { + C.push(b(k[_], T - 1)); + } catch (P) { + } + return C.length ? C : R == "string" ? k : k + "\0"; + } + function y(k, T) { + for (var C = k + "", R, _ = 0; _ < C.length; ) + T[p & _] = p & (R ^= T[p & _] * 19) + C.charCodeAt(_++); + return x(T); + } + function v() { + try { + var k; + return h && (k = h.randomBytes) ? k = k(a) : (k = new Uint8Array(a), (n.crypto || n.msCrypto).getRandomValues(k)), x(k); + } catch (R) { + var T = n.navigator, C = T && T.plugins; + return [+new Date(), n, C, n.screen, x(s)]; + } + } + function x(k) { + return String.fromCharCode.apply(0, k); + } + if (y(r.random(), s), typeof t == "object" && t.exports) { + t.exports = f; + try { + h = CT(); + } catch (k) { + } + } else + typeof define == "function" && define.amd ? define(function() { + return f; + }) : r["seed" + u] = f; + })(typeof self != "undefined" ? self : e, [], Math); +} }); +var Fd = Pt({ "src/node_modules/seedrandom/index.js"(e, t) { + var n = vT(), s = xT(), r = wT(), a = kT(), i = IT(), o = ST(), u = NT(); + u.alea = n, u.xor128 = s, u.xorwow = r, u.xorshift7 = a, u.xor4096 = i, u.tychei = o, t.exports = u; +} }); +var fw = Pt({ "(disabled):src/node_modules/string_decoder/index.js"() { +} }); +var sd = Pt({ "(disabled):fs"() { +} }); +var Ou = Pt({ "(disabled):path"() { +} }); +var TT = Pt({ "(disabled):worker_threads"() { +} }); +var $T = Pt({ "(disabled):perf_hooks"() { +} }); +var AT = Pt({ "(disabled):os"() { +} }); +var ET = Pt({ "src/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js"(e, t) { + var n = function() { + var s = typeof document != "undefined" && document.currentScript ? document.currentScript.src : void 0; + return typeof __filename != "undefined" && (s = s || __filename), function(r) { + r = r || {}; + function a() { + return ee.buffer != Qe && fn(ee.buffer), Sn; + } + function i() { + return ee.buffer != Qe && fn(ee.buffer), Ht; + } + function o() { + return ee.buffer != Qe && fn(ee.buffer), Cn; + } + function u() { + return ee.buffer != Qe && fn(ee.buffer), Rn; + } + function c() { + return ee.buffer != Qe && fn(ee.buffer), Dn; + } + var l = typeof r != "undefined" ? 
r : {}, d, p; + l.ready = new Promise(function(S, A) { + d = S, p = A; + }); + var h; + typeof process != "undefined" && process.listeners && (h = { uncaughtException: process.listeners("uncaughtException"), unhandledRejection: process.listeners("unhandledRejection") }); + var f = {}, m; + for (m in l) + l.hasOwnProperty(m) && (f[m] = l[m]); + var g = [], b = "./this.program", y = function(S, A) { + throw A; + }, v = false, x = false, k = false, T = false; + v = typeof window == "object", x = typeof importScripts == "function", k = typeof process == "object" && typeof process.versions == "object" && typeof process.versions.node == "string", T = !v && !k && !x; + var C = l.ENVIRONMENT_IS_PTHREAD || false; + C && (Qe = l.buffer); + var R = ""; + function _(S) { + return l.locateFile ? l.locateFile(S, R) : R + S; + } + var P, D, F, $, z, W; + if (k) { + x ? R = Ou().dirname(R) + "/" : R = __dirname + "/", P = function(A, M) { + return z || (z = sd()), W || (W = Ou()), A = W.normalize(A), z.readFileSync(A, M ? null : "utf8"); + }, F = function(A) { + var M = P(A, true); + return M.buffer || (M = new Uint8Array(M)), we(M.buffer), M; + }, process.argv.length > 1 && (b = process.argv[1].replace(/\\/g, "/")), g = process.argv.slice(2), process.on("uncaughtException", function(S) { + if (!(S instanceof wu)) + throw S; + }), process.on("unhandledRejection", zs), y = function(S) { + process.exit(S); + }, l.inspect = function() { + return "[Emscripten Module object]"; + }; + var q; + try { + q = TT(); + } catch (S) { + throw console.error('The "worker_threads" module is not supported in this node.js build - perhaps a newer version is needed?'), S; + } + global.Worker = q.Worker; + } else + T ? (typeof read != "undefined" && (P = function(A) { + return read(A); + }), F = function(A) { + var M; + return typeof readbuffer == "function" ? new Uint8Array(readbuffer(A)) : (M = read(A, "binary"), we(typeof M == "object"), M); + }, typeof scriptArgs != "undefined" ? g = scriptArgs : typeof arguments != "undefined" && (g = arguments), typeof quit == "function" && (y = function(S) { + quit(S); + }), typeof print != "undefined" && (typeof console == "undefined" && (console = {}), console.log = print, console.warn = console.error = typeof printErr != "undefined" ? printErr : print)) : (v || x) && (x ? R = self.location.href : typeof document != "undefined" && document.currentScript && (R = document.currentScript.src), typeof s != "undefined" && s && (R = s), R.indexOf("blob:") !== 0 ? R = R.substr(0, R.lastIndexOf("/") + 1) : R = "", k ? (P = function(A, M) { + return z || (z = sd()), W || (W = Ou()), A = W.normalize(A), z.readFileSync(A, M ? 
null : "utf8"); + }, F = function(A) { + var M = P(A, true); + return M.buffer || (M = new Uint8Array(M)), we(M.buffer), M; + }) : (P = function(S) { + var A = new XMLHttpRequest(); + return A.open("GET", S, false), A.send(null), A.responseText; + }, x && (F = function(S) { + var A = new XMLHttpRequest(); + return A.open("GET", S, false), A.responseType = "arraybuffer", A.send(null), new Uint8Array(A.response); + }), D = function(S, A, M) { + var Y = new XMLHttpRequest(); + Y.open("GET", S, true), Y.responseType = "arraybuffer", Y.onload = function() { + if (Y.status == 200 || Y.status == 0 && Y.response) { + A(Y.response); + return; + } + M(); + }, Y.onerror = M, Y.send(null); + }), $ = function(S) { + document.title = S; + }); + k && typeof performance == "undefined" && (global.performance = $T().performance); + var K = l.print || console.log.bind(console), j = l.printErr || console.warn.bind(console); + for (m in f) + f.hasOwnProperty(m) && (l[m] = f[m]); + f = null, l.arguments && (g = l.arguments), l.thisProgram && (b = l.thisProgram), l.quit && (y = l.quit); + function Q(S) { + Q.shown || (Q.shown = {}), Q.shown[S] || (Q.shown[S] = 1, j(S)); + } + var J = Atomics.load, ne = Atomics.store, se = Atomics.compareExchange, te; + l.wasmBinary && (te = l.wasmBinary); + var ie = l.noExitRuntime || true; + typeof WebAssembly != "object" && zs("no native wasm support detected"); + var ee, ce, me = false, Ie; + function we(S, A) { + S || zs("Assertion failed: " + A); + } + function Te(S) { + var A = l["_" + S]; + return we(A, "Cannot call unknown function " + S + ", make sure it is exported"), A; + } + function Fe(S, A, M, Y, de) { + var ue = { string: function(en) { + var Ei = 0; + if (en != null && en !== 0) { + var Ev = (en.length << 2) + 1; + Ei = Ti(Ev), Ye(en, Ei, Ev); + } + return Ei; + }, array: function(en) { + var Ei = Ti(en.length); + return ht(en, Ei), Ei; + } }; + function le(en) { + return A === "string" ? Be(en) : A === "boolean" ? Boolean(en) : en; + } + var ye = Te(S), et = [], zt = 0; + if (Y) + for (var Et = 0; Et < Y.length; Et++) { + var ar = ue[M[Et]]; + ar ? (zt === 0 && (zt = xu()), et[Et] = ar(Y[Et])) : et[Et] = Y[Et]; + } + var Ai = ye.apply(null, et); + return Ai = le(Ai), zt !== 0 && Ni(zt), Ai; + } + function Xe(S, A, M, Y) { + M = M || []; + var de = M.every(function(le) { + return le === "number"; + }), ue = A !== "string"; + return ue && de && !Y ? Te(S) : function() { + return Fe(S, A, M, arguments, Y); + }; + } + function We(S, A, M) { + for (var Y = A + M, de = ""; !(A >= Y); ) { + var ue = S[A++]; + if (!ue) + return de; + if (!(ue & 128)) { + de += String.fromCharCode(ue); + continue; + } + var le = S[A++] & 63; + if ((ue & 224) == 192) { + de += String.fromCharCode((ue & 31) << 6 | le); + continue; + } + var ye = S[A++] & 63; + if ((ue & 240) == 224 ? ue = (ue & 15) << 12 | le << 6 | ye : ue = (ue & 7) << 18 | le << 12 | ye << 6 | S[A++] & 63, ue < 65536) + de += String.fromCharCode(ue); + else { + var et = ue - 65536; + de += String.fromCharCode(55296 | et >> 10, 56320 | et & 1023); + } + } + return de; + } + function Be(S, A) { + return S ? 
We(i(), S, A) : ""; + } + function rt(S, A, M, Y) { + if (!(Y > 0)) + return 0; + for (var de = M, ue = M + Y - 1, le = 0; le < S.length; ++le) { + var ye = S.charCodeAt(le); + if (ye >= 55296 && ye <= 57343) { + var et = S.charCodeAt(++le); + ye = 65536 + ((ye & 1023) << 10) | et & 1023; + } + if (ye <= 127) { + if (M >= ue) + break; + A[M++] = ye; + } else if (ye <= 2047) { + if (M + 1 >= ue) + break; + A[M++] = 192 | ye >> 6, A[M++] = 128 | ye & 63; + } else if (ye <= 65535) { + if (M + 2 >= ue) + break; + A[M++] = 224 | ye >> 12, A[M++] = 128 | ye >> 6 & 63, A[M++] = 128 | ye & 63; + } else { + if (M + 3 >= ue) + break; + A[M++] = 240 | ye >> 18, A[M++] = 128 | ye >> 12 & 63, A[M++] = 128 | ye >> 6 & 63, A[M++] = 128 | ye & 63; + } + } + return A[M] = 0, M - de; + } + function Ye(S, A, M) { + return rt(S, i(), A, M); + } + function at(S) { + for (var A = 0, M = 0; M < S.length; ++M) { + var Y = S.charCodeAt(M); + Y >= 55296 && Y <= 57343 && (Y = 65536 + ((Y & 1023) << 10) | S.charCodeAt(++M) & 1023), Y <= 127 ? ++A : Y <= 2047 ? A += 2 : Y <= 65535 ? A += 3 : A += 4; + } + return A; + } + function ht(S, A) { + a().set(S, A); + } + function pn(S, A) { + return S % A > 0 && (S += A - S % A), S; + } + var Qe, Sn, Ht, Hn, hn, Cn, Rn, qn, Dn; + function fn(S) { + Qe = S, l.HEAP8 = Sn = new Int8Array(S), l.HEAP16 = Hn = new Int16Array(S), l.HEAP32 = Cn = new Int32Array(S), l.HEAPU8 = Ht = new Uint8Array(S), l.HEAPU16 = hn = new Uint16Array(S), l.HEAPU32 = Rn = new Uint32Array(S), l.HEAPF32 = qn = new Float32Array(S), l.HEAPF64 = Dn = new Float64Array(S); + } + var Zs = l.INITIAL_MEMORY || 16777216; + if (C) + ee = l.wasmMemory, Qe = l.buffer; + else if (l.wasmMemory) + ee = l.wasmMemory; + else if (ee = new WebAssembly.Memory({ initial: Zs / 65536, maximum: 32768, shared: true }), !(ee.buffer instanceof SharedArrayBuffer)) + throw j("requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag"), k && console.log("(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)"), Error("bad memory"); + ee && (Qe = ee.buffer), Zs = Qe.byteLength, fn(Qe); + var Qt, Js = [], Fn = [], lu = [], cu = [], Ps = [], ic = false, qp = false; + C || Fn.push({ func: function() { + Sc(); + } }); + function oc() { + if (!C) { + if (l.preRun) + for (typeof l.preRun == "function" && (l.preRun = [l.preRun]); l.preRun.length; ) + jp(l.preRun.shift()); + wi(Js); + } + } + function uc() { + ic = true, !C && wi(Fn); + } + function lc() { + C || wi(lu); + } + function mn() { + C || (qp = true); + } + function cc() { + if (!C) { + if (l.postRun) + for (typeof l.postRun == "function" && (l.postRun = [l.postRun]); l.postRun.length; ) + Kp(l.postRun.shift()); + wi(Ps); + } + } + function jp(S) { + Js.unshift(S); + } + function Kp(S) { + Ps.unshift(S); + } + var jn = 0, du = null, zr = null; + function Xp(S) { + we(!C, "addRunDependency cannot be used in a pthread worker"), jn++, l.monitorRunDependencies && l.monitorRunDependencies(jn); + } + function Yp(S) { + if (jn--, l.monitorRunDependencies && l.monitorRunDependencies(jn), jn == 0 && (du !== null && (clearInterval(du), du = null), zr)) { + var A = zr; + zr = null, A(); + } + } + l.preloadedImages = {}, l.preloadedAudios = {}; + function zs(S) { + l.onAbort && l.onAbort(S), C && console.error("Pthread aborting at " + new Error().stack), S += "", j(S), me = 
true, Ie = 1, S = "abort(" + S + "). Build with -s ASSERTIONS=1 for more info."; + var A = new WebAssembly.RuntimeError(S); + throw p(A), A; + } + function Mr(S, A) { + return String.prototype.startsWith ? S.startsWith(A) : S.indexOf(A) === 0; + } + var Qp = "data:application/octet-stream;base64,"; + function dc(S) { + return Mr(S, Qp); + } + var Zp = "file://"; + function pc(S) { + return Mr(S, Zp); + } + var gn = "tfjs-backend-wasm-threaded-simd.wasm"; + dc(gn) || (gn = _(gn)); + function Jp(S) { + try { + if (S == gn && te) + return new Uint8Array(te); + if (F) + return F(S); + throw "both async and sync fetching of the wasm failed"; + } catch (A) { + zs(A); + } + } + function hc() { + if (!te && (v || x)) { + if (typeof fetch == "function" && !pc(gn)) + return fetch(gn, { credentials: "same-origin" }).then(function(S) { + if (!S.ok) + throw "failed to load wasm binary file at '" + gn + "'"; + return S.arrayBuffer(); + }).catch(function() { + return Jp(gn); + }); + if (D) + return new Promise(function(S, A) { + D(gn, function(M) { + S(new Uint8Array(M)); + }, A); + }); + } + return Promise.resolve().then(function() { + return Jp(gn); + }); + } + function eh() { + var S = { a: qh }; + function A(le, ye) { + var et = le.exports; + if (l.asm = et, Qt = l.asm.nb, ce = ye, !C) { + var zt = ke.unusedWorkers.length; + ke.unusedWorkers.forEach(function(Et) { + ke.loadWasmModuleToWorker(Et, function() { + --zt || Yp("wasm-instantiate"); + }); + }); + } + } + C || Xp("wasm-instantiate"); + function M(le) { + A(le.instance, le.module); + } + function Y(le) { + return hc().then(function(ye) { + return WebAssembly.instantiate(ye, S); + }).then(le, function(ye) { + j("failed to asynchronously prepare wasm: " + ye), zs(ye); + }); + } + function de() { + return !te && typeof WebAssembly.instantiateStreaming == "function" && !dc(gn) && !pc(gn) && typeof fetch == "function" ? fetch(gn, { credentials: "same-origin" }).then(function(le) { + var ye = WebAssembly.instantiateStreaming(le, S); + return ye.then(M, function(et) { + return j("wasm streaming compile failed: " + et), j("falling back to ArrayBuffer instantiation"), Y(M); + }); + }) : Y(M); + } + if (l.instantiateWasm) + try { + var ue = l.instantiateWasm(S, A); + return ue; + } catch (le) { + return j("Module.instantiateWasm callback failed with error: " + le), false; + } + return de().catch(p), {}; + } + var fc = { 10216: function() { + throw "Canceled!"; + }, 10234: function(S, A) { + setTimeout(function() { + Sv(S, A); + }, 0); + } }; + function th() { + ke.initRuntime(); + } + function wi(S) { + for (; S.length > 0; ) { + var A = S.shift(); + if (typeof A == "function") { + A(l); + continue; + } + var M = A.func; + typeof M == "number" ? A.arg === void 0 ? Qt.get(M)() : Qt.get(M)(A.arg) : M(A.arg === void 0 ? 
null : A.arg); + } + } + var er = { EPERM: 63, ENOENT: 44, ESRCH: 71, EINTR: 27, EIO: 29, ENXIO: 60, E2BIG: 1, ENOEXEC: 45, EBADF: 8, ECHILD: 12, EAGAIN: 6, EWOULDBLOCK: 6, ENOMEM: 48, EACCES: 2, EFAULT: 21, ENOTBLK: 105, EBUSY: 10, EEXIST: 20, EXDEV: 75, ENODEV: 43, ENOTDIR: 54, EISDIR: 31, EINVAL: 28, ENFILE: 41, EMFILE: 33, ENOTTY: 59, ETXTBSY: 74, EFBIG: 22, ENOSPC: 51, ESPIPE: 70, EROFS: 69, EMLINK: 34, EPIPE: 64, EDOM: 18, ERANGE: 68, ENOMSG: 49, EIDRM: 24, ECHRNG: 106, EL2NSYNC: 156, EL3HLT: 107, EL3RST: 108, ELNRNG: 109, EUNATCH: 110, ENOCSI: 111, EL2HLT: 112, EDEADLK: 16, ENOLCK: 46, EBADE: 113, EBADR: 114, EXFULL: 115, ENOANO: 104, EBADRQC: 103, EBADSLT: 102, EDEADLOCK: 16, EBFONT: 101, ENOSTR: 100, ENODATA: 116, ETIME: 117, ENOSR: 118, ENONET: 119, ENOPKG: 120, EREMOTE: 121, ENOLINK: 47, EADV: 122, ESRMNT: 123, ECOMM: 124, EPROTO: 65, EMULTIHOP: 36, EDOTDOT: 125, EBADMSG: 9, ENOTUNIQ: 126, EBADFD: 127, EREMCHG: 128, ELIBACC: 129, ELIBBAD: 130, ELIBSCN: 131, ELIBMAX: 132, ELIBEXEC: 133, ENOSYS: 52, ENOTEMPTY: 55, ENAMETOOLONG: 37, ELOOP: 32, EOPNOTSUPP: 138, EPFNOSUPPORT: 139, ECONNRESET: 15, ENOBUFS: 42, EAFNOSUPPORT: 5, EPROTOTYPE: 67, ENOTSOCK: 57, ENOPROTOOPT: 50, ESHUTDOWN: 140, ECONNREFUSED: 14, EADDRINUSE: 3, ECONNABORTED: 13, ENETUNREACH: 40, ENETDOWN: 38, ETIMEDOUT: 73, EHOSTDOWN: 142, EHOSTUNREACH: 23, EINPROGRESS: 26, EALREADY: 7, EDESTADDRREQ: 17, EMSGSIZE: 35, EPROTONOSUPPORT: 66, ESOCKTNOSUPPORT: 137, EADDRNOTAVAIL: 4, ENETRESET: 39, EISCONN: 30, ENOTCONN: 53, ETOOMANYREFS: 141, EUSERS: 136, EDQUOT: 19, ESTALE: 72, ENOTSUP: 138, ENOMEDIUM: 148, EILSEQ: 25, EOVERFLOW: 61, ECANCELED: 11, ENOTRECOVERABLE: 56, EOWNERDEAD: 62, ESTRPIPE: 135 }; + function pu(S, A) { + if (S <= 0 || S > a().length || S & true || A < 0) + return -28; + if (A == 0) + return 0; + A >= 2147483647 && (A = 1 / 0); + var M = Atomics.load(o(), $i >> 2), Y = 0; + if (M == S) { + var de = Atomics.compareExchange(o(), $i >> 2, M, 0); + if (de == M && (--A, Y = 1, A <= 0)) + return 1; + } + var ue = Atomics.notify(o(), S >> 2, A); + if (ue >= 0) + return ue + Y; + throw "Atomics.notify returned an unexpected value " + ue; + } + l._emscripten_futex_wake = pu; + function nh(S) { + if (C) + throw "Internal Error! killThread() can only ever be called from main application thread!"; + if (!S) + throw "Internal Error! Null pthread_ptr in killThread!"; + o()[S + 12 >> 2] = 0; + var A = ke.pthreads[S]; + A.worker.terminate(), ke.freeThreadData(A), ke.runningWorkers.splice(ke.runningWorkers.indexOf(A.worker), 1), A.worker.pthread = void 0; + } + function sh(S) { + if (C) + throw "Internal Error! cancelThread() can only ever be called from main application thread!"; + if (!S) + throw "Internal Error! Null pthread_ptr in cancelThread!"; + var A = ke.pthreads[S]; + A.worker.postMessage({ cmd: "cancel" }); + } + function mc(S) { + if (C) + throw "Internal Error! cleanupThread() can only ever be called from main application thread!"; + if (!S) + throw "Internal Error! 
Null pthread_ptr in cleanupThread!"; + var A = ke.pthreads[S]; + if (A) { + o()[S + 12 >> 2] = 0; + var M = A.worker; + ke.returnWorkerToPool(M); + } + } + var ke = { unusedWorkers: [], runningWorkers: [], initMainThreadBlock: function() { + for (var S = 8, A = 0; A < S; ++A) + ke.allocateUnusedWorker(); + }, initRuntime: function() { + for (var S = Br(228), A = 0; A < 228 / 4; ++A) + u()[S / 4 + A] = 0; + o()[S + 12 >> 2] = S; + var M = S + 152; + o()[M >> 2] = M; + for (var Y = Br(512), A = 0; A < 128; ++A) + u()[Y / 4 + A] = 0; + Atomics.store(u(), S + 100 >> 2, Y), Atomics.store(u(), S + 40 >> 2, S), bf(S, !x, 1), kv(S); + }, initWorker: function() { + }, pthreads: {}, threadExitHandlers: [], setThreadStatus: function() { + }, runExitHandlers: function() { + for (; ke.threadExitHandlers.length > 0; ) + ke.threadExitHandlers.pop()(); + C && rr() && wv(); + }, runExitHandlersAndDeinitThread: function(S, A) { + Atomics.store(u(), S + 56 >> 2, 1), Atomics.store(u(), S + 60 >> 2, 0), ke.runExitHandlers(), Atomics.store(u(), S + 4 >> 2, A), Atomics.store(u(), S + 0 >> 2, 1), pu(S + 0, 2147483647), bf(0, 0, 0); + }, threadExit: function(S) { + var A = rr(); + A && (ke.runExitHandlersAndDeinitThread(A, S), C && postMessage({ cmd: "exit" })); + }, threadCancel: function() { + ke.runExitHandlersAndDeinitThread(rr(), -1), postMessage({ cmd: "cancelDone" }); + }, terminateAllThreads: function() { + for (var S in ke.pthreads) { + var A = ke.pthreads[S]; + A && A.worker && ke.returnWorkerToPool(A.worker); + } + ke.pthreads = {}; + for (var M = 0; M < ke.unusedWorkers.length; ++M) { + var Y = ke.unusedWorkers[M]; + Y.terminate(); + } + ke.unusedWorkers = []; + for (var M = 0; M < ke.runningWorkers.length; ++M) { + var Y = ke.runningWorkers[M], A = Y.pthread; + ke.freeThreadData(A), Y.terminate(); + } + ke.runningWorkers = []; + }, freeThreadData: function(S) { + if (!!S) { + if (S.threadInfoStruct) { + var A = o()[S.threadInfoStruct + 100 >> 2]; + o()[S.threadInfoStruct + 100 >> 2] = 0, vu(A), vu(S.threadInfoStruct); + } + S.threadInfoStruct = 0, S.allocatedOwnStack && S.stackBase && vu(S.stackBase), S.stackBase = 0, S.worker && (S.worker.pthread = null); + } + }, returnWorkerToPool: function(S) { + ke.runWithoutMainThreadQueuedCalls(function() { + delete ke.pthreads[S.pthread.threadInfoStruct], ke.unusedWorkers.push(S), ke.runningWorkers.splice(ke.runningWorkers.indexOf(S), 1), ke.freeThreadData(S.pthread), S.pthread = void 0; + }); + }, runWithoutMainThreadQueuedCalls: function(S) { + o()[Av >> 2] = 0; + try { + S(); + } finally { + o()[Av >> 2] = 1; + } + }, receiveObjectTransfer: function(S) { + }, loadWasmModuleToWorker: function(S, A) { + S.onmessage = function(M) { + var Y = M.data, de = Y.cmd; + if (S.pthread && (ke.currentProxiedOperationCallerThread = S.pthread.threadInfoStruct), Y.targetThread && Y.targetThread != rr()) { + var ue = ke.pthreads[Y.targetThread]; + ue ? ue.worker.postMessage(M.data, Y.transferList) : console.error('Internal error! 
Worker sent a message "' + de + '" to target pthread ' + Y.targetThread + ", but that thread no longer exists!"), ke.currentProxiedOperationCallerThread = void 0; + return; + } + if (de === "processQueuedMainThreadWork") + Ac(); + else if (de === "spawnThread") + kc(M.data); + else if (de === "cleanupThread") + mc(Y.thread); + else if (de === "killThread") + nh(Y.thread); + else if (de === "cancelThread") + sh(Y.thread); + else if (de === "loaded") + S.loaded = true, A && A(S), S.runPthread && (S.runPthread(), delete S.runPthread); + else if (de === "print") + K("Thread " + Y.threadId + ": " + Y.text); + else if (de === "printErr") + j("Thread " + Y.threadId + ": " + Y.text); + else if (de === "alert") + alert("Thread " + Y.threadId + ": " + Y.text); + else if (de === "exit") { + var le = S.pthread && Atomics.load(u(), S.pthread.threadInfoStruct + 64 >> 2); + le && ke.returnWorkerToPool(S); + } else if (de === "exitProcess") + try { + uT(Y.returnCode); + } catch (ye) { + if (ye instanceof wu) + return; + throw ye; + } + else + de === "cancelDone" ? ke.returnWorkerToPool(S) : de === "objectTransfer" ? ke.receiveObjectTransfer(M.data) : M.data.target === "setimmediate" ? S.postMessage(M.data) : j("worker sent an unknown command " + de); + ke.currentProxiedOperationCallerThread = void 0; + }, S.onerror = function(M) { + j("pthread sent an error! " + M.filename + ":" + M.lineno + ": " + M.message); + }, k && (S.on("message", function(M) { + S.onmessage({ data: M }); + }), S.on("error", function(M) { + S.onerror(M); + }), S.on("exit", function(M) { + })), S.postMessage({ cmd: "load", urlOrBlob: l.mainScriptUrlOrBlob || s, wasmMemory: ee, wasmModule: ce }); + }, allocateUnusedWorker: function() { + var S = _("tfjs-backend-wasm-threaded-simd.worker.js"); + ke.unusedWorkers.push(new Worker(S)); + }, getNewWorker: function() { + return ke.unusedWorkers.length == 0 && (ke.allocateUnusedWorker(), ke.loadWasmModuleToWorker(ke.unusedWorkers[0])), ke.unusedWorkers.length > 0 ? ke.unusedWorkers.pop() : null; + }, busySpinWait: function(S) { + for (var A = performance.now() + S; performance.now() < A; ) + ; + } }; + function rh(S, A) { + Tv(S, A), Ni(S); + } + l.establishStackSpace = rh; + function ah() { + return ie; + } + l.getNoExitRuntime = ah; + function ih(S, A) { + return Qt.get(S)(A); + } + l.invokeEntryPoint = ih; + function oh(S, A, M, Y) { + zs("Assertion failed: " + Be(S) + ", at: " + [A ? Be(A) : "unknown filename", M, Y ? Be(Y) : "unknown function"]); + } + function uh(S, A) { + var M = _main(S, A); + } + var Lr; + k ? Lr = function() { + var S = process.hrtime(); + return S[0] * 1e3 + S[1] / 1e6; + } : C ? Lr = function() { + return performance.now() - l.__performance_now_clock_drift; + } : typeof dateNow != "undefined" ? 
Lr = dateNow : Lr = function() { + return performance.now(); + }; + function lh(S) { + return o()[vv() >> 2] = S, S; + } + function ch(S, A) { + if (C) + return tr(1, 1, S, A); + } + function dh(S, A) { + if (S == A) + postMessage({ cmd: "processQueuedMainThreadWork" }); + else if (C) + postMessage({ targetThread: S, cmd: "processThreadQueue" }); + else { + var M = ke.pthreads[S], Y = M && M.worker; + if (!Y) + return; + Y.postMessage({ cmd: "processThreadQueue" }); + } + return 1; + } + function ph() { + zs(); + } + function hh(S, A, M) { + var Y = bh(A, M); + return fc[S].apply(null, Y); + } + function fh(S, A) { + } + function gc(S, A, M) { + if (S <= 0 || S > a().length || S & true) + return -28; + if (v) { + if (Atomics.load(o(), S >> 2) != A) + return -6; + for (var de = performance.now(), ue = de + M, le = Atomics.exchange(o(), $i >> 2, S); ; ) { + if (de = performance.now(), de > ue) + return le = Atomics.exchange(o(), $i >> 2, 0), -73; + if (le = Atomics.exchange(o(), $i >> 2, 0), le == 0) + break; + if (Ac(), Atomics.load(o(), S >> 2) != A) + return -6; + le = Atomics.exchange(o(), $i >> 2, S); + } + return 0; + } else { + var Y = Atomics.wait(o(), S >> 2, A, M); + if (Y === "timed-out") + return -73; + if (Y === "not-equal") + return -6; + if (Y === "ok") + return 0; + throw "Atomics.wait returned an unexpected value " + Y; + } + } + function mh(S, A, M) { + i().copyWithin(S, A, A + M); + } + function gh() { + return k ? AT().cpus().length : navigator.hardwareConcurrency; + } + function tr(S, A) { + for (var M = arguments.length - 2, Y = xu(), de = M, ue = Ti(de * 8), le = ue >> 3, ye = 0; ye < M; ye++) { + var et = arguments[2 + ye]; + c()[le + ye] = et; + } + var zt = Nv(S, de, ue, A); + return Ni(Y), zt; + } + var hu = [], fu = []; + function bh(S, A) { + fu.length = 0; + var M; + for (A >>= 2; M = i()[S++]; ) { + var Y = M < 105; + Y && A & 1 && A++, fu.push(Y ? c()[A++ >> 1] : o()[A]), ++A; + } + return fu; + } + function yh(S, A, M) { + hu.length = A; + for (var Y = M >> 3, de = 0; de < A; de++) + hu[de] = c()[Y + de]; + var ue = S < 0, le = ue ? 
fc[-S - 1] : Hh[S]; + return le.apply(null, hu); + } + function vh() { + return i().length; + } + function xh(S) { + try { + return ee.grow(S - Qe.byteLength + 65535 >>> 16), fn(ee.buffer), 1; + } catch (A) { + } + } + function wh(S) { + var A = vh(); + if (S <= A) + return false; + var M = 2147483648; + if (S > M) + return false; + for (var Y = 1; Y <= 4; Y *= 2) { + var de = A * (1 + 0.2 / Y); + de = Math.min(de, S + 100663296); + var ue = Math.min(M, pn(Math.max(S, de), 65536)), le = xh(ue); + if (le) + return true; + } + return false; + } + var Pe = { inEventHandler: 0, removeAllEventListeners: function() { + for (var S = Pe.eventHandlers.length - 1; S >= 0; --S) + Pe._removeHandler(S); + Pe.eventHandlers = [], Pe.deferredCalls = []; + }, registerRemoveEventListeners: function() { + Pe.removeEventListenersRegistered || (cu.push(Pe.removeAllEventListeners), Pe.removeEventListenersRegistered = true); + }, deferredCalls: [], deferCall: function(S, A, M) { + function Y(le, ye) { + if (le.length != ye.length) + return false; + for (var et in le) + if (le[et] != ye[et]) + return false; + return true; + } + for (var de in Pe.deferredCalls) { + var ue = Pe.deferredCalls[de]; + if (ue.targetFunction == S && Y(ue.argsList, M)) + return; + } + Pe.deferredCalls.push({ targetFunction: S, precedence: A, argsList: M }), Pe.deferredCalls.sort(function(le, ye) { + return le.precedence < ye.precedence; + }); + }, removeDeferredCalls: function(S) { + for (var A = 0; A < Pe.deferredCalls.length; ++A) + Pe.deferredCalls[A].targetFunction == S && (Pe.deferredCalls.splice(A, 1), --A); + }, canPerformEventHandlerRequests: function() { + return Pe.inEventHandler && Pe.currentEventHandler.allowsDeferredCalls; + }, runDeferredCalls: function() { + if (!!Pe.canPerformEventHandlerRequests()) + for (var S = 0; S < Pe.deferredCalls.length; ++S) { + var A = Pe.deferredCalls[S]; + Pe.deferredCalls.splice(S, 1), --S, A.targetFunction.apply(null, A.argsList); + } + }, eventHandlers: [], removeAllHandlersOnTarget: function(S, A) { + for (var M = 0; M < Pe.eventHandlers.length; ++M) + Pe.eventHandlers[M].target == S && (!A || A == Pe.eventHandlers[M].eventTypeString) && Pe._removeHandler(M--); + }, _removeHandler: function(S) { + var A = Pe.eventHandlers[S]; + A.target.removeEventListener(A.eventTypeString, A.eventListenerFunc, A.useCapture), Pe.eventHandlers.splice(S, 1); + }, registerOrRemoveHandler: function(S) { + var A = function(de) { + ++Pe.inEventHandler, Pe.currentEventHandler = S, Pe.runDeferredCalls(), S.handlerFunc(de), Pe.runDeferredCalls(), --Pe.inEventHandler; + }; + if (S.callbackfunc) + S.eventListenerFunc = A, S.target.addEventListener(S.eventTypeString, A, S.useCapture), Pe.eventHandlers.push(S), Pe.registerRemoveEventListeners(); + else + for (var M = 0; M < Pe.eventHandlers.length; ++M) + Pe.eventHandlers[M].target == S.target && Pe.eventHandlers[M].eventTypeString == S.eventTypeString && Pe._removeHandler(M--); + }, queueEventHandlerOnThread_iiii: function(S, A, M, Y, de) { + var ue = xu(), le = Ti(12); + o()[le >> 2] = M, o()[le + 4 >> 2] = Y, o()[le + 8 >> 2] = de, gf(0, S, 637534208, A, Y, le), Ni(ue); + }, getTargetThreadForEventCallback: function(S) { + switch (S) { + case 1: + return 0; + case 2: + return ke.currentProxiedOperationCallerThread; + default: + return S; + } + }, getNodeNameForTarget: function(S) { + return S ? S == window ? "#window" : S == screen ? "#screen" : S && S.nodeName ? 
S.nodeName : "" : ""; + }, fullscreenEnabled: function() { + return document.fullscreenEnabled || document.webkitFullscreenEnabled; + } }; + function kh(S) { + var A = at(S) + 1, M = Br(A); + return Ye(S, M, A), M; + } + function Ih(S, A, M, Y) { + var de = xu(), ue = Ti(12), le = 0; + A && (le = kh(A)), o()[ue >> 2] = le, o()[ue + 4 >> 2] = M, o()[ue + 8 >> 2] = Y, gf(0, S, 657457152, 0, le, ue), Ni(de); + } + function Sh(S, A, M, Y) { + A = A ? Be(A) : "", Ih(S, A, M, Y); + } + function Ch(S) { + return S > 2 ? Be(S) : S; + } + var Nh = [0, typeof document != "undefined" ? document : 0, typeof window != "undefined" ? window : 0]; + function Th(S) { + S = Ch(S); + var A = Nh[S] || (typeof document != "undefined" ? document.querySelector(S) : void 0); + return A; + } + function mu(S) { + return Th(S); + } + function bc(S, A, M) { + var Y = mu(S); + if (!Y) + return -4; + if (Y.canvasSharedPtr && (o()[Y.canvasSharedPtr >> 2] = A, o()[Y.canvasSharedPtr + 4 >> 2] = M), Y.offscreenCanvas || !Y.controlTransferredOffscreen) { + Y.offscreenCanvas && (Y = Y.offscreenCanvas); + var de = false; + if (Y.GLctxObject && Y.GLctxObject.GLctx) { + var ue = Y.GLctxObject.GLctx.getParameter(2978); + de = ue[0] === 0 && ue[1] === 0 && ue[2] === Y.width && ue[3] === Y.height; + } + Y.width = A, Y.height = M, de && Y.GLctxObject.GLctx.viewport(0, 0, A, M); + } else if (Y.canvasSharedPtr) { + var le = o()[Y.canvasSharedPtr + 8 >> 2]; + return Sh(le, S, A, M), 1; + } else + return -4; + return 0; + } + function yc(S, A, M) { + return C ? tr(2, 1, S, A, M) : bc(S, A, M); + } + function $h(S, A, M) { + var Y = mu(S); + return Y ? bc(S, A, M) : yc(S, A, M); + } + function Ah(S) { + } + function Eh(S, A) { + } + function _h(S) { + var A = S.getExtension("ANGLE_instanced_arrays"); + if (A) + return S.vertexAttribDivisor = function(M, Y) { + A.vertexAttribDivisorANGLE(M, Y); + }, S.drawArraysInstanced = function(M, Y, de, ue) { + A.drawArraysInstancedANGLE(M, Y, de, ue); + }, S.drawElementsInstanced = function(M, Y, de, ue, le) { + A.drawElementsInstancedANGLE(M, Y, de, ue, le); + }, 1; + } + function Rh(S) { + var A = S.getExtension("OES_vertex_array_object"); + if (A) + return S.createVertexArray = function() { + return A.createVertexArrayOES(); + }, S.deleteVertexArray = function(M) { + A.deleteVertexArrayOES(M); + }, S.bindVertexArray = function(M) { + A.bindVertexArrayOES(M); + }, S.isVertexArray = function(M) { + return A.isVertexArrayOES(M); + }, 1; + } + function Dh(S) { + var A = S.getExtension("WEBGL_draw_buffers"); + if (A) + return S.drawBuffers = function(M, Y) { + A.drawBuffersWEBGL(M, Y); + }, 1; + } + function Fh(S) { + return !!(S.multiDrawWebgl = S.getExtension("WEBGL_multi_draw")); + } + var Je = { counter: 1, buffers: [], programs: [], framebuffers: [], renderbuffers: [], textures: [], uniforms: [], shaders: [], vaos: [], contexts: {}, offscreenCanvases: {}, timerQueriesEXT: [], programInfos: {}, stringCache: {}, unpackAlignment: 4, recordError: function(A) { + Je.lastError || (Je.lastError = A); + }, getNewId: function(S) { + for (var A = Je.counter++, M = S.length; M < A; M++) + S[M] = null; + return A; + }, getSource: function(S, A, M, Y) { + for (var de = "", ue = 0; ue < A; ++ue) { + var le = Y ? o()[Y + ue * 4 >> 2] : -1; + de += Be(o()[M + ue * 4 >> 2], le < 0 ? 
void 0 : le); + } + return de; + }, createContext: function(S, A) { + var M = S.getContext("webgl", A); + if (!M) + return 0; + var Y = Je.registerContext(M, A); + return Y; + }, registerContext: function(S, A) { + var M = Br(8); + o()[M + 4 >> 2] = rr(); + var Y = { handle: M, attributes: A, version: A.majorVersion, GLctx: S }; + return S.canvas && (S.canvas.GLctxObject = Y), Je.contexts[M] = Y, (typeof A.enableExtensionsByDefault == "undefined" || A.enableExtensionsByDefault) && Je.initExtensions(Y), M; + }, makeContextCurrent: function(S) { + return Je.currentContext = Je.contexts[S], l.ctx = nr = Je.currentContext && Je.currentContext.GLctx, !(S && !nr); + }, getContext: function(S) { + return Je.contexts[S]; + }, deleteContext: function(S) { + Je.currentContext === Je.contexts[S] && (Je.currentContext = null), typeof Pe == "object" && Pe.removeAllHandlersOnTarget(Je.contexts[S].GLctx.canvas), Je.contexts[S] && Je.contexts[S].GLctx.canvas && (Je.contexts[S].GLctx.canvas.GLctxObject = void 0), vu(Je.contexts[S].handle), Je.contexts[S] = null; + }, initExtensions: function(S) { + if (S || (S = Je.currentContext), !S.initExtensionsDone) { + S.initExtensionsDone = true; + var A = S.GLctx; + _h(A), Rh(A), Dh(A), A.disjointTimerQueryExt = A.getExtension("EXT_disjoint_timer_query"), Fh(A); + var M = A.getSupportedExtensions() || []; + M.forEach(function(Y) { + Y.indexOf("lose_context") < 0 && Y.indexOf("debug") < 0 && A.getExtension(Y); + }); + } + }, populateUniformTable: function(S) { + for (var A = Je.programs[S], M = Je.programInfos[S] = { uniforms: {}, maxUniformLength: 0, maxAttributeLength: -1, maxUniformBlockNameLength: -1 }, Y = M.uniforms, de = nr.getProgramParameter(A, 35718), ue = 0; ue < de; ++ue) { + var le = nr.getActiveUniform(A, ue), ye = le.name; + M.maxUniformLength = Math.max(M.maxUniformLength, ye.length + 1), ye.slice(-1) == "]" && (ye = ye.slice(0, ye.lastIndexOf("["))); + var et = nr.getUniformLocation(A, ye); + if (et) { + var zt = Je.getNewId(Je.uniforms); + Y[ye] = [le.size, zt], Je.uniforms[zt] = et; + for (var Et = 1; Et < le.size; ++Et) { + var ar = ye + "[" + Et + "]"; + et = nr.getUniformLocation(A, ar), zt = Je.getNewId(Je.uniforms), Je.uniforms[zt] = et; + } + } + } + } }, Oh = ["default", "low-power", "high-performance"]; + function Ph(S, A) { + var M = A >> 2, Y = o()[M + 6], de = { alpha: !!o()[M + 0], depth: !!o()[M + 1], stencil: !!o()[M + 2], antialias: !!o()[M + 3], premultipliedAlpha: !!o()[M + 4], preserveDrawingBuffer: !!o()[M + 5], powerPreference: Oh[Y], failIfMajorPerformanceCaveat: !!o()[M + 7], majorVersion: o()[M + 8], minorVersion: o()[M + 9], enableExtensionsByDefault: o()[M + 10], explicitSwapControl: o()[M + 11], proxyContextToMainThread: o()[M + 12], renderViaOffscreenBackBuffer: o()[M + 13] }, ue = mu(S); + if (!ue || de.explicitSwapControl) + return 0; + var le = Je.createContext(ue, de); + return le; + } + function zh(S, A) { + return Ph(S, A); + } + var ki = { mappings: {}, buffers: [null, [], []], printChar: function(S, A) { + var M = ki.buffers[S]; + A === 0 || A === 10 ? ((S === 1 ? K : j)(We(M, 0)), M.length = 0) : M.push(A); + }, varargs: void 0, get: function() { + ki.varargs += 4; + var S = o()[ki.varargs - 4 >> 2]; + return S; + }, getStr: function(S) { + var A = Be(S); + return A; + }, get64: function(S, A) { + return S; + } }; + function vc(S) { + return C ? 
tr(3, 1, S) : 0; + } + function xc(S, A, M, Y, de) { + if (C) + return tr(4, 1, S, A, M, Y, de); + } + function wc(S, A, M, Y) { + if (C) + return tr(5, 1, S, A, M, Y); + for (var de = 0, ue = 0; ue < M; ue++) { + for (var le = o()[A + ue * 8 >> 2], ye = o()[A + (ue * 8 + 4) >> 2], et = 0; et < ye; et++) + ki.printChar(S, i()[le + et]); + de += ye; + } + return o()[Y >> 2] = de, 0; + } + function Mh(S) { + var A = ke.threadExitHandlers.pop(); + S && A(); + } + function Lh(S, A) { + ke.threadExitHandlers.push(function() { + Qt.get(S)(A); + }); + } + function kc(S) { + if (C) + throw "Internal Error! spawnThread() can only ever be called from main application thread!"; + var A = ke.getNewWorker(); + if (A.pthread !== void 0) + throw "Internal error!"; + if (!S.pthread_ptr) + throw "Internal error, no pthread ptr!"; + ke.runningWorkers.push(A); + for (var M = Br(128 * 4), Y = 0; Y < 128; ++Y) + o()[M + Y * 4 >> 2] = 0; + var de = S.stackBase + S.stackSize, ue = ke.pthreads[S.pthread_ptr] = { worker: A, stackBase: S.stackBase, stackSize: S.stackSize, allocatedOwnStack: S.allocatedOwnStack, threadInfoStruct: S.pthread_ptr }, le = ue.threadInfoStruct >> 2; + Atomics.store(u(), le + 16, S.detached), Atomics.store(u(), le + 25, M), Atomics.store(u(), le + 10, ue.threadInfoStruct), Atomics.store(u(), le + 20, S.stackSize), Atomics.store(u(), le + 19, de), Atomics.store(u(), le + 26, S.stackSize), Atomics.store(u(), le + 28, de), Atomics.store(u(), le + 29, S.detached); + var ye = xv(), et = ye + 40; + Atomics.store(u(), le + 43, et), A.pthread = ue; + var zt = { cmd: "run", start_routine: S.startRoutine, arg: S.arg, threadInfoStruct: S.pthread_ptr, stackBase: S.stackBase, stackSize: S.stackSize }; + A.runPthread = function() { + zt.time = performance.now(), A.postMessage(zt, S.transferList); + }, A.loaded && (A.runPthread(), delete A.runPthread); + } + function Bh(S, A, M, Y) { + if (typeof SharedArrayBuffer == "undefined") + return j("Current environment does not support SharedArrayBuffer, pthreads are not available!"), 6; + if (!S) + return j("pthread_create called with a null thread pointer!"), 28; + var de = [], ue = 0; + if (C && (de.length === 0 || ue)) + return Cv(687865856, S, A, M, Y); + if (ue) + return ue; + var le = 0, ye = 0, et = 0; + A && A != -1 ? (le = o()[A >> 2], le += 81920, ye = o()[A + 8 >> 2], et = o()[A + 12 >> 2] !== 0) : le = 2097152; + var zt = ye == 0; + zt ? ye = $v(16, le) : (ye -= le, we(ye > 0)); + for (var Et = Br(228), ar = 0; ar < 57; ++ar) + u()[(Et >> 2) + ar] = 0; + o()[S >> 2] = Et, o()[Et + 12 >> 2] = Et; + var Ai = Et + 152; + o()[Ai >> 2] = Ai; + var en = { stackBase: ye, stackSize: le, allocatedOwnStack: zt, detached: et, startRoutine: M, pthread_ptr: Et, arg: Y, transferList: de }; + return C ? 
(en.cmd = "spawnThread", postMessage(en, de)) : kc(en), 0; + } + function Vh() { + if (!!C) { + var S = rr(); + if (!!S) { + var A = Atomics.load(u(), S + 56 >> 2); + if (!A) { + var M = Atomics.load(u(), S + 0 >> 2); + if (M == 2) + throw "Canceled!"; + } + } + } + } + function Wh() { + k || x || Q("Blocking on the main thread is very dangerous, see https://emscripten.org/docs/porting/pthreads.html#blocking-on-the-main-browser-thread"); + } + function Uh(S, A, M) { + if (!S) + return j("pthread_join attempted on a null thread pointer!"), er.ESRCH; + if (C && rr() == S) + return j("PThread " + S + " is attempting to join to itself!"), er.EDEADLK; + if (!C && Iv() == S) + return j("Main thread " + S + " is attempting to join to itself!"), er.EDEADLK; + var Y = o()[S + 12 >> 2]; + if (Y !== S) + return j("pthread_join attempted on thread " + S + ", which does not point to a valid thread, or does not exist anymore!"), er.ESRCH; + var de = Atomics.load(u(), S + 64 >> 2); + if (de) + return j("Attempted to join thread " + S + ", which was already detached!"), er.EINVAL; + for (M && Wh(); ; ) { + var ue = Atomics.load(u(), S + 0 >> 2); + if (ue == 1) { + var le = Atomics.load(u(), S + 4 >> 2); + return A && (o()[A >> 2] = le), Atomics.store(u(), S + 64 >> 2, 1), C ? postMessage({ cmd: "cleanupThread", thread: S }) : mc(S), 0; + } + if (!M) + return er.EBUSY; + Vh(), C || Ac(), gc(S + 0, ue, C ? 100 : 1); + } + } + function Gh(S, A) { + return Uh(S, A, true); + } + function Ic(S) { + if (C) + return tr(6, 1, S); + switch (S) { + case 30: + return 16384; + case 85: + var A = 2147483648; + return A / 16384; + case 132: + case 133: + case 12: + case 137: + case 138: + case 15: + case 235: + case 16: + case 17: + case 18: + case 19: + case 20: + case 149: + case 13: + case 10: + case 236: + case 153: + case 9: + case 21: + case 22: + case 159: + case 154: + case 14: + case 77: + case 78: + case 139: + case 82: + case 68: + case 67: + case 164: + case 11: + case 29: + case 47: + case 48: + case 95: + case 52: + case 51: + case 46: + return 200809; + case 27: + case 246: + case 127: + case 128: + case 23: + case 24: + case 160: + case 161: + case 181: + case 182: + case 242: + case 183: + case 184: + case 243: + case 244: + case 245: + case 165: + case 178: + case 179: + case 49: + case 50: + case 168: + case 169: + case 175: + case 170: + case 171: + case 172: + case 97: + case 76: + case 32: + case 173: + case 35: + case 80: + case 81: + case 79: + return -1; + case 176: + case 177: + case 7: + case 155: + case 8: + case 157: + case 125: + case 126: + case 92: + case 93: + case 129: + case 130: + case 131: + case 94: + case 91: + return 1; + case 74: + case 60: + case 69: + case 70: + case 4: + return 1024; + case 31: + case 42: + case 72: + return 32; + case 87: + case 26: + case 33: + return 2147483647; + case 34: + case 1: + return 47839; + case 38: + case 36: + return 99; + case 43: + case 37: + return 2048; + case 0: + return 2097152; + case 3: + return 65536; + case 28: + return 32768; + case 44: + return 32767; + case 75: + return 16384; + case 39: + return 1e3; + case 89: + return 700; + case 71: + return 256; + case 40: + return 255; + case 2: + return 100; + case 180: + return 64; + case 25: + return 20; + case 5: + return 16; + case 6: + return 6; + case 73: + return 4; + case 84: + return typeof navigator == "object" && navigator.hardwareConcurrency || 1; + } + return lh(28), -1; + } + C || ke.initMainThreadBlock(); + var nr, Hh = [null, ch, yc, vc, xc, wc, Ic], qh = { e: oh, r: uh, x: 
dh, b: ph, y: hh, j: fh, d: gc, c: pu, f: Lr, p: mh, A: gh, u: yh, q: wh, v: $h, i: Ah, s: Eh, w: zh, l: vc, n: xc, g: wc, o: th, a: ee || l.wasmMemory, z: Mh, k: Lh, h: Bh, m: Gh, t: Ic }, yv = eh(), Sc = l.___wasm_call_ctors = function() { + return (Sc = l.___wasm_call_ctors = l.asm.B).apply(null, arguments); + }, jh = l._init = function() { + return (jh = l._init = l.asm.C).apply(null, arguments); + }, Kh = l._init_with_threads_count = function() { + return (Kh = l._init_with_threads_count = l.asm.D).apply(null, arguments); + }, Xh = l._get_threads_count = function() { + return (Xh = l._get_threads_count = l.asm.E).apply(null, arguments); + }, Yh = l._register_tensor = function() { + return (Yh = l._register_tensor = l.asm.F).apply(null, arguments); + }, Qh = l._dispose_data = function() { + return (Qh = l._dispose_data = l.asm.G).apply(null, arguments); + }, Zh = l._dispose = function() { + return (Zh = l._dispose = l.asm.H).apply(null, arguments); + }, Jh = l._Abs = function() { + return (Jh = l._Abs = l.asm.I).apply(null, arguments); + }, ef = l._Add = function() { + return (ef = l._Add = l.asm.J).apply(null, arguments); + }, tf = l._AddN = function() { + return (tf = l._AddN = l.asm.K).apply(null, arguments); + }, nf = l._All = function() { + return (nf = l._All = l.asm.L).apply(null, arguments); + }, sf = l._Any = function() { + return (sf = l._Any = l.asm.M).apply(null, arguments); + }, rf = l._ArgMax = function() { + return (rf = l._ArgMax = l.asm.N).apply(null, arguments); + }, af = l._AvgPool = function() { + return (af = l._AvgPool = l.asm.O).apply(null, arguments); + }, of = l._BatchMatMul = function() { + return (of = l._BatchMatMul = l.asm.P).apply(null, arguments); + }, uf = l._Ceil = function() { + return (uf = l._Ceil = l.asm.Q).apply(null, arguments); + }, lf = l._ClipByValue = function() { + return (lf = l._ClipByValue = l.asm.R).apply(null, arguments); + }, cf = l._Conv2D = function() { + return (cf = l._Conv2D = l.asm.S).apply(null, arguments); + }, df = l._Conv2DBackpropInput = function() { + return (df = l._Conv2DBackpropInput = l.asm.T).apply(null, arguments); + }, pf = l._Cos = function() { + return (pf = l._Cos = l.asm.U).apply(null, arguments); + }, hf = l._Cosh = function() { + return (hf = l._Cosh = l.asm.V).apply(null, arguments); + }, Cc = l._CropAndResize = function() { + return (Cc = l._CropAndResize = l.asm.W).apply(null, arguments); + }, Nc = l._Cumsum = function() { + return (Nc = l._Cumsum = l.asm.X).apply(null, arguments); + }, gu = l._DepthToSpace = function() { + return (gu = l._DepthToSpace = l.asm.Y).apply(null, arguments); + }, Ii = l._DepthwiseConv2dNative = function() { + return (Ii = l._DepthwiseConv2dNative = l.asm.Z).apply(null, arguments); + }, ff = l._Elu = function() { + return (ff = l._Elu = l.asm._).apply(null, arguments); + }, bu = l._Equal = function() { + return (bu = l._Equal = l.asm.$).apply(null, arguments); + }, Si = l._Exp = function() { + return (Si = l._Exp = l.asm.aa).apply(null, arguments); + }, Ci = l._FlipLeftRight = function() { + return (Ci = l._FlipLeftRight = l.asm.ba).apply(null, arguments); + }, mf = l._Floor = function() { + return (mf = l._Floor = l.asm.ca).apply(null, arguments); + }, Z = l._FloorDiv = function() { + return (Z = l._FloorDiv = l.asm.da).apply(null, arguments); + }, re = l._FusedBatchNorm = function() { + return (re = l._FusedBatchNorm = l.asm.ea).apply(null, arguments); + }, ge = l._FusedConv2D = function() { + return (ge = l._FusedConv2D = l.asm.fa).apply(null, arguments); + }, Ze = 
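+ // every kernel stub below rebinds itself to the real wasm export (l.asm.*)
+ // on first call, so subsequent calls bypass the wrapper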
l._FusedDepthwiseConv2D = function() { + return (Ze = l._FusedDepthwiseConv2D = l.asm.ga).apply(null, arguments); + }, xt = l._Gather = function() { + return (xt = l._Gather = l.asm.ha).apply(null, arguments); + }, ft = l._GatherNd = function() { + return (ft = l._GatherNd = l.asm.ia).apply(null, arguments); + }, Ve = l._Greater = function() { + return (Ve = l._Greater = l.asm.ja).apply(null, arguments); + }, Ue = l._GreaterEqual = function() { + return (Ue = l._GreaterEqual = l.asm.ka).apply(null, arguments); + }, qt = l._LeakyRelu = function() { + return (qt = l._LeakyRelu = l.asm.la).apply(null, arguments); + }, Ms = l._Less = function() { + return (Ms = l._Less = l.asm.ma).apply(null, arguments); + }, Ls = l._LessEqual = function() { + return (Ls = l._LessEqual = l.asm.na).apply(null, arguments); + }, Tc = l._Log = function() { + return (Tc = l._Log = l.asm.oa).apply(null, arguments); + }, yu = l._LogicalAnd = function() { + return (yu = l._LogicalAnd = l.asm.pa).apply(null, arguments); + }, Nn = l._Max = function() { + return (Nn = l._Max = l.asm.qa).apply(null, arguments); + }, sr = l._MaxPool = function() { + return (sr = l._MaxPool = l.asm.ra).apply(null, arguments); + }, $c = l._Maximum = function() { + return ($c = l._Maximum = l.asm.sa).apply(null, arguments); + }, mN = l._Mean = function() { + return (mN = l._Mean = l.asm.ta).apply(null, arguments); + }, gN = l._Min = function() { + return (gN = l._Min = l.asm.ua).apply(null, arguments); + }, bN = l._Minimum = function() { + return (bN = l._Minimum = l.asm.va).apply(null, arguments); + }, yN = l._MirrorPad = function() { + return (yN = l._MirrorPad = l.asm.wa).apply(null, arguments); + }, vN = l._Multiply = function() { + return (vN = l._Multiply = l.asm.xa).apply(null, arguments); + }, xN = l._Neg = function() { + return (xN = l._Neg = l.asm.ya).apply(null, arguments); + }, wN = l._NonMaxSuppressionV3 = function() { + return (wN = l._NonMaxSuppressionV3 = l.asm.za).apply(null, arguments); + }, kN = l._NonMaxSuppressionV4 = function() { + return (kN = l._NonMaxSuppressionV4 = l.asm.Aa).apply(null, arguments); + }, IN = l._NonMaxSuppressionV5 = function() { + return (IN = l._NonMaxSuppressionV5 = l.asm.Ba).apply(null, arguments); + }, SN = l._NotEqual = function() { + return (SN = l._NotEqual = l.asm.Ca).apply(null, arguments); + }, CN = l._OneHot = function() { + return (CN = l._OneHot = l.asm.Da).apply(null, arguments); + }, NN = l._PadV2 = function() { + return (NN = l._PadV2 = l.asm.Ea).apply(null, arguments); + }, TN = l._Pow = function() { + return (TN = l._Pow = l.asm.Fa).apply(null, arguments); + }, $N = l._Prelu = function() { + return ($N = l._Prelu = l.asm.Ga).apply(null, arguments); + }, AN = l._Prod = function() { + return (AN = l._Prod = l.asm.Ha).apply(null, arguments); + }, EN = l._RealDiv = function() { + return (EN = l._RealDiv = l.asm.Ia).apply(null, arguments); + }, _N = l._Relu = function() { + return (_N = l._Relu = l.asm.Ja).apply(null, arguments); + }, RN = l._Relu6 = function() { + return (RN = l._Relu6 = l.asm.Ka).apply(null, arguments); + }, DN = l._ResizeBilinear = function() { + return (DN = l._ResizeBilinear = l.asm.La).apply(null, arguments); + }, FN = l._Reverse = function() { + return (FN = l._Reverse = l.asm.Ma).apply(null, arguments); + }, ON = l._RotateWithOffset = function() { + return (ON = l._RotateWithOffset = l.asm.Na).apply(null, arguments); + }, PN = l._Round = function() { + return (PN = l._Round = l.asm.Oa).apply(null, arguments); + }, zN = l._Rsqrt = function() { + return (zN = 
l._Rsqrt = l.asm.Pa).apply(null, arguments); + }, MN = l._ScatterNd = function() { + return (MN = l._ScatterNd = l.asm.Qa).apply(null, arguments); + }, LN = l._SelectV2 = function() { + return (LN = l._SelectV2 = l.asm.Ra).apply(null, arguments); + }, BN = l._Sigmoid = function() { + return (BN = l._Sigmoid = l.asm.Sa).apply(null, arguments); + }, VN = l._Sin = function() { + return (VN = l._Sin = l.asm.Ta).apply(null, arguments); + }, WN = l._Softmax = function() { + return (WN = l._Softmax = l.asm.Ua).apply(null, arguments); + }, UN = l._SparseFillEmptyRows = function() { + return (UN = l._SparseFillEmptyRows = l.asm.Va).apply(null, arguments); + }, GN = l._SparseReshape = function() { + return (GN = l._SparseReshape = l.asm.Wa).apply(null, arguments); + }, HN = l._SparseSegmentReduction = function() { + return (HN = l._SparseSegmentReduction = l.asm.Xa).apply(null, arguments); + }, qN = l._Sqrt = function() { + return (qN = l._Sqrt = l.asm.Ya).apply(null, arguments); + }, jN = l._Square = function() { + return (jN = l._Square = l.asm.Za).apply(null, arguments); + }, KN = l._SquaredDifference = function() { + return (KN = l._SquaredDifference = l.asm._a).apply(null, arguments); + }, XN = l._Step = function() { + return (XN = l._Step = l.asm.$a).apply(null, arguments); + }, YN = l._StridedSlice = function() { + return (YN = l._StridedSlice = l.asm.ab).apply(null, arguments); + }, QN = l._Sub = function() { + return (QN = l._Sub = l.asm.bb).apply(null, arguments); + }, ZN = l._Sum = function() { + return (ZN = l._Sum = l.asm.cb).apply(null, arguments); + }, JN = l._Tan = function() { + return (JN = l._Tan = l.asm.db).apply(null, arguments); + }, eT = l._Tanh = function() { + return (eT = l._Tanh = l.asm.eb).apply(null, arguments); + }, tT = l._Tile = function() { + return (tT = l._Tile = l.asm.fb).apply(null, arguments); + }, nT = l._TopK = function() { + return (nT = l._TopK = l.asm.gb).apply(null, arguments); + }, sT = l._Transform = function() { + return (sT = l._Transform = l.asm.hb).apply(null, arguments); + }, rT = l._Transpose = function() { + return (rT = l._Transpose = l.asm.ib).apply(null, arguments); + }, aT = l.__FusedMatMul = function() { + return (aT = l.__FusedMatMul = l.asm.jb).apply(null, arguments); + }, Br = l._malloc = function() { + return (Br = l._malloc = l.asm.kb).apply(null, arguments); + }, vu = l._free = function() { + return (vu = l._free = l.asm.lb).apply(null, arguments); + }, vv = l.___errno_location = function() { + return (vv = l.___errno_location = l.asm.mb).apply(null, arguments); + }, xv = l._emscripten_get_global_libc = function() { + return (xv = l._emscripten_get_global_libc = l.asm.ob).apply(null, arguments); + }, rr = l._pthread_self = function() { + return (rr = l._pthread_self = l.asm.pb).apply(null, arguments); + }, wv = l.___pthread_tsd_run_dtors = function() { + return (wv = l.___pthread_tsd_run_dtors = l.asm.qb).apply(null, arguments); + }, Ac = l._emscripten_main_thread_process_queued_calls = function() { + return (Ac = l._emscripten_main_thread_process_queued_calls = l.asm.rb).apply(null, arguments); + }, iT = l._emscripten_current_thread_process_queued_calls = function() { + return (iT = l._emscripten_current_thread_process_queued_calls = l.asm.sb).apply(null, arguments); + }, kv = l._emscripten_register_main_browser_thread_id = function() { + return (kv = l._emscripten_register_main_browser_thread_id = l.asm.tb).apply(null, arguments); + }, Iv = l._emscripten_main_browser_thread_id = function() { + return (Iv = 
l._emscripten_main_browser_thread_id = l.asm.ub).apply(null, arguments); + }, Sv = l.__emscripten_do_dispatch_to_thread = function() { + return (Sv = l.__emscripten_do_dispatch_to_thread = l.asm.vb).apply(null, arguments); + }, Cv = l._emscripten_sync_run_in_main_thread_4 = function() { + return (Cv = l._emscripten_sync_run_in_main_thread_4 = l.asm.wb).apply(null, arguments); + }, Nv = l._emscripten_run_in_main_runtime_thread_js = function() { + return (Nv = l._emscripten_run_in_main_runtime_thread_js = l.asm.xb).apply(null, arguments); + }, gf = l.__emscripten_call_on_thread = function() { + return (gf = l.__emscripten_call_on_thread = l.asm.yb).apply(null, arguments); + }, oT = l._emscripten_tls_init = function() { + return (oT = l._emscripten_tls_init = l.asm.zb).apply(null, arguments); + }, bf = l.__emscripten_thread_init = function() { + return (bf = l.__emscripten_thread_init = l.asm.Ab).apply(null, arguments); + }, xu = l.stackSave = function() { + return (xu = l.stackSave = l.asm.Bb).apply(null, arguments); + }, Ni = l.stackRestore = function() { + return (Ni = l.stackRestore = l.asm.Cb).apply(null, arguments); + }, Ti = l.stackAlloc = function() { + return (Ti = l.stackAlloc = l.asm.Db).apply(null, arguments); + }, Tv = l._emscripten_stack_set_limits = function() { + return (Tv = l._emscripten_stack_set_limits = l.asm.Eb).apply(null, arguments); + }, $v = l._memalign = function() { + return ($v = l._memalign = l.asm.Fb).apply(null, arguments); + }, Av = l.__emscripten_allow_main_runtime_queued_calls = 10208, $i = l.__emscripten_main_thread_futex = 10412; + l.cwrap = Xe, l.PThread = ke, l.PThread = ke, l.wasmMemory = ee, l.ExitStatus = wu; + var Ec; + function wu(S) { + this.name = "ExitStatus", this.message = "Program terminated with exit(" + S + ")", this.status = S; + } + zr = function S() { + Ec || yf(), Ec || (zr = S); + }; + function yf(S) { + if (S = S || g, jn > 0) + return; + if (C) { + d(l), uc(), postMessage({ cmd: "loaded" }); + return; + } + if (oc(), jn > 0) + return; + function A() { + Ec || (Ec = true, l.calledRun = true, !me && (uc(), lc(), d(l), l.onRuntimeInitialized && l.onRuntimeInitialized(), cc())); + } + l.setStatus ? 
(l.setStatus("Running..."), setTimeout(function() { + setTimeout(function() { + l.setStatus(""); + }, 1), A(); + }, 1)) : A(); + } + l.run = yf; + function uT(S, A) { + if (!(A && ie && S === 0)) { + if (!A && C) + throw postMessage({ cmd: "exitProcess", returnCode: S }), new wu(S); + ie || (ke.terminateAllThreads(), Ie = S, mn(), l.onExit && l.onExit(S), me = true), y(S, new wu(S)); + } + } + if (l.preInit) + for (typeof l.preInit == "function" && (l.preInit = [l.preInit]); l.preInit.length > 0; ) + l.preInit.pop()(); + C && (ie = false, ke.initWorker()), yf(); + var _c; + h && (_c = { uncaughtException: process.listeners("uncaughtException").filter(function(S) { + return !h.uncaughtException.indexOf(S) > -1; + }), unhandledRejection: process.listeners("unhandledRejection").filter(function(S) { + return !h.unhandledRejection.indexOf(S) > -1; + }) }); + var Rc; + if (typeof WasmBackendModule != "undefined") + Rc = WasmBackendModule; + else if (typeof r != "undefined") + Rc = r; + else + throw new Error("Could not find wasm module in post.js"); + if (_c) { + var lT = Rc._dispose; + Rc._dispose = function() { + lT(), _c.uncaughtException.forEach(function(S) { + process.removeListener("uncaughtException", S); + }), _c.unhandledRejection.forEach(function(S) { + process.removeListener("unhandledRejection", S); + }); + }; + } + return r.ready; + }; + }(); + typeof e == "object" && typeof t == "object" ? t.exports = n : typeof define == "function" && define.amd ? define([], function() { + return n; + }) : typeof e == "object" && (e.WasmBackendModuleThreadedSimd = n); +} }); +var _T = Pt({ "src/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm.js"(e, t) { + var n = function() { + var s = typeof document != "undefined" && document.currentScript ? document.currentScript.src : void 0; + return typeof __filename != "undefined" && (s = s || __filename), function(r) { + r = r || {}; + var a = typeof r != "undefined" ? r : {}, i, o; + a.ready = new Promise(function(Z, re) { + i = Z, o = re; + }); + var u; + typeof process != "undefined" && process.listeners && (u = { uncaughtException: process.listeners("uncaughtException"), unhandledRejection: process.listeners("unhandledRejection") }); + var c = {}, l; + for (l in a) + a.hasOwnProperty(l) && (c[l] = a[l]); + var d = [], p = "./this.program", h = function(Z, re) { + throw re; + }, f = false, m = false, g = false, b = false; + f = typeof window == "object", m = typeof importScripts == "function", g = typeof process == "object" && typeof process.versions == "object" && typeof process.versions.node == "string", b = !f && !g && !m; + var y = ""; + function v(Z) { + return a.locateFile ? a.locateFile(Z, y) : y + Z; + } + var x, k, T, C, R, _; + g ? (m ? y = Ou().dirname(y) + "/" : y = __dirname + "/", x = function(re, ge) { + return R || (R = sd()), _ || (_ = Ou()), re = _.normalize(re), R.readFileSync(re, ge ? null : "utf8"); + }, T = function(re) { + var ge = x(re, true); + return ge.buffer || (ge = new Uint8Array(ge)), K(ge.buffer), ge; + }, process.argv.length > 1 && (p = process.argv[1].replace(/\\/g, "/")), d = process.argv.slice(2), process.on("uncaughtException", function(Z) { + if (!(Z instanceof ff)) + throw Z; + }), process.on("unhandledRejection", Ps), h = function(Z) { + process.exit(Z); + }, a.inspect = function() { + return "[Emscripten Module object]"; + }) : b ? (typeof read != "undefined" && (x = function(re) { + return read(re); + }), T = function(re) { + var ge; + return typeof readbuffer == "function" ? 
new Uint8Array(readbuffer(re)) : (ge = read(re, "binary"), K(typeof ge == "object"), ge); + }, typeof scriptArgs != "undefined" ? d = scriptArgs : typeof arguments != "undefined" && (d = arguments), typeof quit == "function" && (h = function(Z) { + quit(Z); + }), typeof print != "undefined" && (typeof console == "undefined" && (console = {}), console.log = print, console.warn = console.error = typeof printErr != "undefined" ? printErr : print)) : (f || m) && (m ? y = self.location.href : typeof document != "undefined" && document.currentScript && (y = document.currentScript.src), s && (y = s), y.indexOf("blob:") !== 0 ? y = y.substr(0, y.lastIndexOf("/") + 1) : y = "", x = function(Z) { + var re = new XMLHttpRequest(); + return re.open("GET", Z, false), re.send(null), re.responseText; + }, m && (T = function(Z) { + var re = new XMLHttpRequest(); + return re.open("GET", Z, false), re.responseType = "arraybuffer", re.send(null), new Uint8Array(re.response); + }), k = function(Z, re, ge) { + var Ze = new XMLHttpRequest(); + Ze.open("GET", Z, true), Ze.responseType = "arraybuffer", Ze.onload = function() { + if (Ze.status == 200 || Ze.status == 0 && Ze.response) { + re(Ze.response); + return; + } + ge(); + }, Ze.onerror = ge, Ze.send(null); + }, C = function(Z) { + document.title = Z; + }); + var P = a.print || console.log.bind(console), D = a.printErr || console.warn.bind(console); + for (l in c) + c.hasOwnProperty(l) && (a[l] = c[l]); + c = null, a.arguments && (d = a.arguments), a.thisProgram && (p = a.thisProgram), a.quit && (h = a.quit); + var F; + a.wasmBinary && (F = a.wasmBinary); + var $ = a.noExitRuntime || true; + typeof WebAssembly != "object" && Ps("no native wasm support detected"); + var z, W = false, q; + function K(Z, re) { + Z || Ps("Assertion failed: " + re); + } + function j(Z) { + var re = a["_" + Z]; + return K(re, "Cannot call unknown function " + Z + ", make sure it is exported"), re; + } + function Q(Z, re, ge, Ze, xt) { + var ft = { string: function(Nn) { + var sr = 0; + if (Nn != null && Nn !== 0) { + var $c = (Nn.length << 2) + 1; + sr = gu($c), ee(Nn, sr, $c); + } + return sr; + }, array: function(Nn) { + var sr = gu(Nn.length); + return ce(Nn, sr), sr; + } }; + function Ve(Nn) { + return re === "string" ? te(Nn) : re === "boolean" ? Boolean(Nn) : Nn; + } + var Ue = j(Z), qt = [], Ms = 0; + if (Ze) + for (var Ls = 0; Ls < Ze.length; Ls++) { + var Tc = ft[ge[Ls]]; + Tc ? (Ms === 0 && (Ms = Cc()), qt[Ls] = Tc(Ze[Ls])) : qt[Ls] = Ze[Ls]; + } + var yu = Ue.apply(null, qt); + return yu = Ve(yu), Ms !== 0 && Nc(Ms), yu; + } + function J(Z, re, ge, Ze) { + ge = ge || []; + var xt = ge.every(function(Ve) { + return Ve === "number"; + }), ft = re !== "string"; + return ft && xt && !Ze ? j(Z) : function() { + return Q(Z, re, ge, arguments, Ze); + }; + } + var ne = typeof TextDecoder != "undefined" ? new TextDecoder("utf8") : void 0; + function se(Z, re, ge) { + for (var Ze = re + ge, xt = re; Z[xt] && !(xt >= Ze); ) + ++xt; + if (xt - re > 16 && Z.subarray && ne) + return ne.decode(Z.subarray(re, xt)); + for (var ft = ""; re < xt; ) { + var Ve = Z[re++]; + if (!(Ve & 128)) { + ft += String.fromCharCode(Ve); + continue; + } + var Ue = Z[re++] & 63; + if ((Ve & 224) == 192) { + ft += String.fromCharCode((Ve & 31) << 6 | Ue); + continue; + } + var qt = Z[re++] & 63; + if ((Ve & 240) == 224 ? 
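+ // a 0xE0-0xEF lead byte starts a 3-byte UTF-8 sequence, otherwise 4 bytes
+ // are consumed; code points above U+FFFF become UTF-16 surrogate pairs below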
Ve = (Ve & 15) << 12 | Ue << 6 | qt : Ve = (Ve & 7) << 18 | Ue << 12 | qt << 6 | Z[re++] & 63, Ve < 65536) + ft += String.fromCharCode(Ve); + else { + var Ms = Ve - 65536; + ft += String.fromCharCode(55296 | Ms >> 10, 56320 | Ms & 1023); + } + } + return ft; + } + function te(Z, re) { + return Z ? se(Te, Z, re) : ""; + } + function ie(Z, re, ge, Ze) { + if (!(Ze > 0)) + return 0; + for (var xt = ge, ft = ge + Ze - 1, Ve = 0; Ve < Z.length; ++Ve) { + var Ue = Z.charCodeAt(Ve); + if (Ue >= 55296 && Ue <= 57343) { + var qt = Z.charCodeAt(++Ve); + Ue = 65536 + ((Ue & 1023) << 10) | qt & 1023; + } + if (Ue <= 127) { + if (ge >= ft) + break; + re[ge++] = Ue; + } else if (Ue <= 2047) { + if (ge + 1 >= ft) + break; + re[ge++] = 192 | Ue >> 6, re[ge++] = 128 | Ue & 63; + } else if (Ue <= 65535) { + if (ge + 2 >= ft) + break; + re[ge++] = 224 | Ue >> 12, re[ge++] = 128 | Ue >> 6 & 63, re[ge++] = 128 | Ue & 63; + } else { + if (ge + 3 >= ft) + break; + re[ge++] = 240 | Ue >> 18, re[ge++] = 128 | Ue >> 12 & 63, re[ge++] = 128 | Ue >> 6 & 63, re[ge++] = 128 | Ue & 63; + } + } + return re[ge] = 0, ge - xt; + } + function ee(Z, re, ge) { + return ie(Z, Te, re, ge); + } + function ce(Z, re) { + we.set(Z, re); + } + function me(Z, re) { + return Z % re > 0 && (Z += re - Z % re), Z; + } + var Ie, we, Te, Fe, Xe, We, Be, rt, Ye; + function at(Z) { + Ie = Z, a.HEAP8 = we = new Int8Array(Z), a.HEAP16 = Fe = new Int16Array(Z), a.HEAP32 = We = new Int32Array(Z), a.HEAPU8 = Te = new Uint8Array(Z), a.HEAPU16 = Xe = new Uint16Array(Z), a.HEAPU32 = Be = new Uint32Array(Z), a.HEAPF32 = rt = new Float32Array(Z), a.HEAPF64 = Ye = new Float64Array(Z); + } + var ht = a.INITIAL_MEMORY || 16777216, pn, Qe = [], Sn = [], Ht = [], Hn = [], hn = false; + Sn.push({ func: function() { + hc(); + } }); + function Cn() { + if (a.preRun) + for (typeof a.preRun == "function" && (a.preRun = [a.preRun]); a.preRun.length; ) + fn(a.preRun.shift()); + jn(Qe); + } + function Rn() { + hn = true, jn(Sn); + } + function qn() { + jn(Ht); + } + function Dn() { + if (a.postRun) + for (typeof a.postRun == "function" && (a.postRun = [a.postRun]); a.postRun.length; ) + Zs(a.postRun.shift()); + jn(Hn); + } + function fn(Z) { + Qe.unshift(Z); + } + function Zs(Z) { + Hn.unshift(Z); + } + var Qt = 0, Js = null, Fn = null; + function lu(Z) { + Qt++, a.monitorRunDependencies && a.monitorRunDependencies(Qt); + } + function cu(Z) { + if (Qt--, a.monitorRunDependencies && a.monitorRunDependencies(Qt), Qt == 0 && (Js !== null && (clearInterval(Js), Js = null), Fn)) { + var re = Fn; + Fn = null, re(); + } + } + a.preloadedImages = {}, a.preloadedAudios = {}; + function Ps(Z) { + a.onAbort && a.onAbort(Z), Z += "", D(Z), W = true, q = 1, Z = "abort(" + Z + "). Build with -s ASSERTIONS=1 for more info."; + var re = new WebAssembly.RuntimeError(Z); + throw o(re), re; + } + function ic(Z, re) { + return String.prototype.startsWith ? 
Z.startsWith(re) : Z.indexOf(re) === 0; + } + var qp = "data:application/octet-stream;base64,"; + function oc(Z) { + return ic(Z, qp); + } + var uc = "file://"; + function lc(Z) { + return ic(Z, uc); + } + var mn = "tfjs-backend-wasm.wasm"; + oc(mn) || (mn = v(mn)); + function cc(Z) { + try { + if (Z == mn && F) + return new Uint8Array(F); + if (T) + return T(Z); + throw "both async and sync fetching of the wasm failed"; + } catch (re) { + Ps(re); + } + } + function jp() { + if (!F && (f || m)) { + if (typeof fetch == "function" && !lc(mn)) + return fetch(mn, { credentials: "same-origin" }).then(function(Z) { + if (!Z.ok) + throw "failed to load wasm binary file at '" + mn + "'"; + return Z.arrayBuffer(); + }).catch(function() { + return cc(mn); + }); + if (k) + return new Promise(function(Z, re) { + k(mn, function(ge) { + Z(new Uint8Array(ge)); + }, re); + }); + } + return Promise.resolve().then(function() { + return cc(mn); + }); + } + function Kp() { + var Z = { a: gn }; + function re(Ve, Ue) { + var qt = Ve.exports; + a.asm = qt, z = a.asm.h, at(z.buffer), pn = a.asm.Va, cu("wasm-instantiate"); + } + lu("wasm-instantiate"); + function ge(Ve) { + re(Ve.instance); + } + function Ze(Ve) { + return jp().then(function(Ue) { + return WebAssembly.instantiate(Ue, Z); + }).then(Ve, function(Ue) { + D("failed to asynchronously prepare wasm: " + Ue), Ps(Ue); + }); + } + function xt() { + return !F && typeof WebAssembly.instantiateStreaming == "function" && !oc(mn) && !lc(mn) && typeof fetch == "function" ? fetch(mn, { credentials: "same-origin" }).then(function(Ve) { + var Ue = WebAssembly.instantiateStreaming(Ve, Z); + return Ue.then(ge, function(qt) { + return D("wasm streaming compile failed: " + qt), D("falling back to ArrayBuffer instantiation"), Ze(ge); + }); + }) : Ze(ge); + } + if (a.instantiateWasm) + try { + var ft = a.instantiateWasm(Z, re); + return ft; + } catch (Ve) { + return D("Module.instantiateWasm callback failed with error: " + Ve), false; + } + return xt().catch(o), {}; + } + function jn(Z) { + for (; Z.length > 0; ) { + var re = Z.shift(); + if (typeof re == "function") { + re(a); + continue; + } + var ge = re.func; + typeof ge == "number" ? re.arg === void 0 ? pn.get(ge)() : pn.get(ge)(re.arg) : ge(re.arg === void 0 ? null : re.arg); + } + } + function du() { + Ps(); + } + function zr(Z, re, ge) { + Te.copyWithin(Z, re, re + ge); + } + function Xp() { + return Te.length; + } + function Yp(Z) { + try { + return z.grow(Z - Ie.byteLength + 65535 >>> 16), at(z.buffer), 1; + } catch (re) { + } + } + function zs(Z) { + var re = Xp(), ge = 2147483648; + if (Z > ge) + return false; + for (var Ze = 1; Ze <= 4; Ze *= 2) { + var xt = re * (1 + 0.2 / Ze); + xt = Math.min(xt, Z + 100663296); + var ft = Math.min(ge, me(Math.max(Z, xt), 65536)), Ve = Yp(ft); + if (Ve) + return true; + } + return false; + } + var Mr = { mappings: {}, buffers: [null, [], []], printChar: function(Z, re) { + var ge = Mr.buffers[Z]; + re === 0 || re === 10 ? ((Z === 1 ? 
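+ // fd 1 flushes through Module.print (P), fd 2 through Module.printErr (D);
+ // output is buffered per fd and flushed on newline (10) or NUL (0)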
P : D)(se(ge, 0)), ge.length = 0) : ge.push(re); + }, varargs: void 0, get: function() { + Mr.varargs += 4; + var Z = We[Mr.varargs - 4 >> 2]; + return Z; + }, getStr: function(Z) { + var re = te(Z); + return re; + }, get64: function(Z, re) { + return Z; + } }; + function Qp(Z) { + return 0; + } + function dc(Z, re, ge, Ze, xt) { + } + function Zp(Z, re, ge, Ze) { + for (var xt = 0, ft = 0; ft < ge; ft++) { + for (var Ve = We[re + ft * 8 >> 2], Ue = We[re + (ft * 8 + 4) >> 2], qt = 0; qt < Ue; qt++) + Mr.printChar(Z, Te[Ve + qt]); + xt += Ue; + } + return We[Ze >> 2] = xt, 0; + } + function pc() { + return 28; + } + var gn = { a: du, d: zr, e: zs, f: Qp, c: dc, b: Zp, g: pc }, Jp = Kp(), hc = a.___wasm_call_ctors = function() { + return (hc = a.___wasm_call_ctors = a.asm.i).apply(null, arguments); + }, eh = a._init = function() { + return (eh = a._init = a.asm.j).apply(null, arguments); + }, fc = a._init_with_threads_count = function() { + return (fc = a._init_with_threads_count = a.asm.k).apply(null, arguments); + }, th = a._get_threads_count = function() { + return (th = a._get_threads_count = a.asm.l).apply(null, arguments); + }, wi = a._register_tensor = function() { + return (wi = a._register_tensor = a.asm.m).apply(null, arguments); + }, er = a._dispose_data = function() { + return (er = a._dispose_data = a.asm.n).apply(null, arguments); + }, pu = a._dispose = function() { + return (pu = a._dispose = a.asm.o).apply(null, arguments); + }, nh = a._Abs = function() { + return (nh = a._Abs = a.asm.p).apply(null, arguments); + }, sh = a._Add = function() { + return (sh = a._Add = a.asm.q).apply(null, arguments); + }, mc = a._AddN = function() { + return (mc = a._AddN = a.asm.r).apply(null, arguments); + }, ke = a._All = function() { + return (ke = a._All = a.asm.s).apply(null, arguments); + }, rh = a._Any = function() { + return (rh = a._Any = a.asm.t).apply(null, arguments); + }, ah = a._ArgMax = function() { + return (ah = a._ArgMax = a.asm.u).apply(null, arguments); + }, ih = a._AvgPool = function() { + return (ih = a._AvgPool = a.asm.v).apply(null, arguments); + }, oh = a._BatchMatMul = function() { + return (oh = a._BatchMatMul = a.asm.w).apply(null, arguments); + }, uh = a._Ceil = function() { + return (uh = a._Ceil = a.asm.x).apply(null, arguments); + }, Lr = a._ClipByValue = function() { + return (Lr = a._ClipByValue = a.asm.y).apply(null, arguments); + }, lh = a._Conv2D = function() { + return (lh = a._Conv2D = a.asm.z).apply(null, arguments); + }, ch = a._Conv2DBackpropInput = function() { + return (ch = a._Conv2DBackpropInput = a.asm.A).apply(null, arguments); + }, dh = a._Cos = function() { + return (dh = a._Cos = a.asm.B).apply(null, arguments); + }, ph = a._Cosh = function() { + return (ph = a._Cosh = a.asm.C).apply(null, arguments); + }, hh = a._CropAndResize = function() { + return (hh = a._CropAndResize = a.asm.D).apply(null, arguments); + }, fh = a._Cumsum = function() { + return (fh = a._Cumsum = a.asm.E).apply(null, arguments); + }, gc = a._DepthToSpace = function() { + return (gc = a._DepthToSpace = a.asm.F).apply(null, arguments); + }, mh = a._DepthwiseConv2dNative = function() { + return (mh = a._DepthwiseConv2dNative = a.asm.G).apply(null, arguments); + }, gh = a._Elu = function() { + return (gh = a._Elu = a.asm.H).apply(null, arguments); + }, tr = a._Equal = function() { + return (tr = a._Equal = a.asm.I).apply(null, arguments); + }, hu = a._Exp = function() { + return (hu = a._Exp = a.asm.J).apply(null, arguments); + }, fu = a._FlipLeftRight = function() { + 
return (fu = a._FlipLeftRight = a.asm.K).apply(null, arguments); + }, bh = a._Floor = function() { + return (bh = a._Floor = a.asm.L).apply(null, arguments); + }, yh = a._FloorDiv = function() { + return (yh = a._FloorDiv = a.asm.M).apply(null, arguments); + }, vh = a._FusedBatchNorm = function() { + return (vh = a._FusedBatchNorm = a.asm.N).apply(null, arguments); + }, xh = a._FusedConv2D = function() { + return (xh = a._FusedConv2D = a.asm.O).apply(null, arguments); + }, wh = a._FusedDepthwiseConv2D = function() { + return (wh = a._FusedDepthwiseConv2D = a.asm.P).apply(null, arguments); + }, Pe = a._Gather = function() { + return (Pe = a._Gather = a.asm.Q).apply(null, arguments); + }, kh = a._GatherNd = function() { + return (kh = a._GatherNd = a.asm.R).apply(null, arguments); + }, Ih = a._Greater = function() { + return (Ih = a._Greater = a.asm.S).apply(null, arguments); + }, Sh = a._GreaterEqual = function() { + return (Sh = a._GreaterEqual = a.asm.T).apply(null, arguments); + }, Ch = a._LeakyRelu = function() { + return (Ch = a._LeakyRelu = a.asm.U).apply(null, arguments); + }, Nh = a._Less = function() { + return (Nh = a._Less = a.asm.V).apply(null, arguments); + }, Th = a._LessEqual = function() { + return (Th = a._LessEqual = a.asm.W).apply(null, arguments); + }, mu = a._Log = function() { + return (mu = a._Log = a.asm.X).apply(null, arguments); + }, bc = a._LogicalAnd = function() { + return (bc = a._LogicalAnd = a.asm.Y).apply(null, arguments); + }, yc = a._Max = function() { + return (yc = a._Max = a.asm.Z).apply(null, arguments); + }, $h = a._MaxPool = function() { + return ($h = a._MaxPool = a.asm._).apply(null, arguments); + }, Ah = a._Maximum = function() { + return (Ah = a._Maximum = a.asm.$).apply(null, arguments); + }, Eh = a._Mean = function() { + return (Eh = a._Mean = a.asm.aa).apply(null, arguments); + }, _h = a._Min = function() { + return (_h = a._Min = a.asm.ba).apply(null, arguments); + }, Rh = a._Minimum = function() { + return (Rh = a._Minimum = a.asm.ca).apply(null, arguments); + }, Dh = a._MirrorPad = function() { + return (Dh = a._MirrorPad = a.asm.da).apply(null, arguments); + }, Fh = a._Multiply = function() { + return (Fh = a._Multiply = a.asm.ea).apply(null, arguments); + }, Je = a._Neg = function() { + return (Je = a._Neg = a.asm.fa).apply(null, arguments); + }, Oh = a._NonMaxSuppressionV3 = function() { + return (Oh = a._NonMaxSuppressionV3 = a.asm.ga).apply(null, arguments); + }, Ph = a._NonMaxSuppressionV4 = function() { + return (Ph = a._NonMaxSuppressionV4 = a.asm.ha).apply(null, arguments); + }, zh = a._NonMaxSuppressionV5 = function() { + return (zh = a._NonMaxSuppressionV5 = a.asm.ia).apply(null, arguments); + }, ki = a._NotEqual = function() { + return (ki = a._NotEqual = a.asm.ja).apply(null, arguments); + }, vc = a._OneHot = function() { + return (vc = a._OneHot = a.asm.ka).apply(null, arguments); + }, xc = a._PadV2 = function() { + return (xc = a._PadV2 = a.asm.la).apply(null, arguments); + }, wc = a._Pow = function() { + return (wc = a._Pow = a.asm.ma).apply(null, arguments); + }, Mh = a._Prelu = function() { + return (Mh = a._Prelu = a.asm.na).apply(null, arguments); + }, Lh = a._Prod = function() { + return (Lh = a._Prod = a.asm.oa).apply(null, arguments); + }, kc = a._RealDiv = function() { + return (kc = a._RealDiv = a.asm.pa).apply(null, arguments); + }, Bh = a._Relu = function() { + return (Bh = a._Relu = a.asm.qa).apply(null, arguments); + }, Vh = a._Relu6 = function() { + return (Vh = a._Relu6 = a.asm.ra).apply(null, arguments); + }, 
Wh = a._ResizeBilinear = function() { + return (Wh = a._ResizeBilinear = a.asm.sa).apply(null, arguments); + }, Uh = a._Reverse = function() { + return (Uh = a._Reverse = a.asm.ta).apply(null, arguments); + }, Gh = a._RotateWithOffset = function() { + return (Gh = a._RotateWithOffset = a.asm.ua).apply(null, arguments); + }, Ic = a._Round = function() { + return (Ic = a._Round = a.asm.va).apply(null, arguments); + }, nr = a._Rsqrt = function() { + return (nr = a._Rsqrt = a.asm.wa).apply(null, arguments); + }, Hh = a._ScatterNd = function() { + return (Hh = a._ScatterNd = a.asm.xa).apply(null, arguments); + }, qh = a._SelectV2 = function() { + return (qh = a._SelectV2 = a.asm.ya).apply(null, arguments); + }, yv = a._Sigmoid = function() { + return (yv = a._Sigmoid = a.asm.za).apply(null, arguments); + }, Sc = a._Sin = function() { + return (Sc = a._Sin = a.asm.Aa).apply(null, arguments); + }, jh = a._Softmax = function() { + return (jh = a._Softmax = a.asm.Ba).apply(null, arguments); + }, Kh = a._SparseFillEmptyRows = function() { + return (Kh = a._SparseFillEmptyRows = a.asm.Ca).apply(null, arguments); + }, Xh = a._SparseReshape = function() { + return (Xh = a._SparseReshape = a.asm.Da).apply(null, arguments); + }, Yh = a._SparseSegmentReduction = function() { + return (Yh = a._SparseSegmentReduction = a.asm.Ea).apply(null, arguments); + }, Qh = a._Sqrt = function() { + return (Qh = a._Sqrt = a.asm.Fa).apply(null, arguments); + }, Zh = a._Square = function() { + return (Zh = a._Square = a.asm.Ga).apply(null, arguments); + }, Jh = a._SquaredDifference = function() { + return (Jh = a._SquaredDifference = a.asm.Ha).apply(null, arguments); + }, ef = a._Step = function() { + return (ef = a._Step = a.asm.Ia).apply(null, arguments); + }, tf = a._StridedSlice = function() { + return (tf = a._StridedSlice = a.asm.Ja).apply(null, arguments); + }, nf = a._Sub = function() { + return (nf = a._Sub = a.asm.Ka).apply(null, arguments); + }, sf = a._Sum = function() { + return (sf = a._Sum = a.asm.La).apply(null, arguments); + }, rf = a._Tan = function() { + return (rf = a._Tan = a.asm.Ma).apply(null, arguments); + }, af = a._Tanh = function() { + return (af = a._Tanh = a.asm.Na).apply(null, arguments); + }, of = a._Tile = function() { + return (of = a._Tile = a.asm.Oa).apply(null, arguments); + }, uf = a._TopK = function() { + return (uf = a._TopK = a.asm.Pa).apply(null, arguments); + }, lf = a._Transform = function() { + return (lf = a._Transform = a.asm.Qa).apply(null, arguments); + }, cf = a._Transpose = function() { + return (cf = a._Transpose = a.asm.Ra).apply(null, arguments); + }, df = a.__FusedMatMul = function() { + return (df = a.__FusedMatMul = a.asm.Sa).apply(null, arguments); + }, pf = a._malloc = function() { + return (pf = a._malloc = a.asm.Ta).apply(null, arguments); + }, hf = a._free = function() { + return (hf = a._free = a.asm.Ua).apply(null, arguments); + }, Cc = a.stackSave = function() { + return (Cc = a.stackSave = a.asm.Wa).apply(null, arguments); + }, Nc = a.stackRestore = function() { + return (Nc = a.stackRestore = a.asm.Xa).apply(null, arguments); + }, gu = a.stackAlloc = function() { + return (gu = a.stackAlloc = a.asm.Ya).apply(null, arguments); + }; + a.cwrap = J; + var Ii; + function ff(Z) { + this.name = "ExitStatus", this.message = "Program terminated with exit(" + Z + ")", this.status = Z; + } + Fn = function Z() { + Ii || bu(), Ii || (Fn = Z); + }; + function bu(Z) { + if (Z = Z || d, Qt > 0 || (Cn(), Qt > 0)) + return; + function re() { + Ii || (Ii = true, a.calledRun = 
true, !W && (Rn(), qn(), i(a), a.onRuntimeInitialized && a.onRuntimeInitialized(), Dn()));
+ }
+ a.setStatus ? (a.setStatus("Running..."), setTimeout(function() {
+ setTimeout(function() {
+ a.setStatus("");
+ }, 1), re();
+ }, 1)) : re();
+ }
+ if (a.run = bu, a.preInit)
+ for (typeof a.preInit == "function" && (a.preInit = [a.preInit]); a.preInit.length > 0; )
+ a.preInit.pop()();
+ bu();
+ var Si;
+ // keep only the process listeners added during module init, i.e. those
+ // missing from the pre-init snapshot in u
+ u && (Si = { uncaughtException: process.listeners("uncaughtException").filter(function(Z) {
+ return u.uncaughtException.indexOf(Z) === -1;
+ }), unhandledRejection: process.listeners("unhandledRejection").filter(function(Z) {
+ return u.unhandledRejection.indexOf(Z) === -1;
+ }) });
+ var Ci;
+ if (typeof r != "undefined")
+ Ci = r;
+ else if (typeof WasmBackendModuleThreadedSimd != "undefined")
+ Ci = WasmBackendModuleThreadedSimd;
+ else
+ throw new Error("Could not find wasm module in post.js");
+ if (Si) {
+ var mf = Ci._dispose;
+ Ci._dispose = function() {
+ mf(), Si.uncaughtException.forEach(function(Z) {
+ process.removeListener("uncaughtException", Z);
+ }), Si.unhandledRejection.forEach(function(Z) {
+ process.removeListener("unhandledRejection", Z);
+ });
+ };
+ }
+ return r.ready;
+ };
+ }();
+ typeof e == "object" && typeof t == "object" ? t.exports = n : typeof define == "function" && define.amd ? define([], function() {
+ return n;
+ }) : typeof e == "object" && (e.WasmBackendModule = n);
+} });
+var RT = 1e-7;
+var DT = 1e-4;
+var Od = class {
+ constructor(e, t) {
+ this.backend = e, this.dataMover = t, this.data = /* @__PURE__ */ new WeakMap(), this.dataIdsCount = 0;
+ }
+ get(e) {
+ return this.data.has(e) || this.dataMover.moveData(this.backend, e), this.data.get(e);
+ }
+ set(e, t) {
+ this.dataIdsCount++, this.data.set(e, t);
+ }
+ has(e) {
+ return this.data.has(e);
+ }
+ delete(e) {
+ return this.dataIdsCount--, this.data.delete(e);
+ }
+ numDataIds() {
+ return this.dataIdsCount;
+ }
+};
+var tl = class {
+ refCount(e) {
+ return On("refCount");
+ }
+ incRef(e) {
+ return On("incRef");
+ }
+ timerAvailable() {
+ return true;
+ }
+ time(e) {
+ return On("time");
+ }
+ read(e) {
+ return On("read");
+ }
+ readSync(e) {
+ return On("readSync");
+ }
+ readToGPU(e, t) {
+ return On("readToGPU");
+ }
+ numDataIds() {
+ return On("numDataIds");
+ }
+ disposeData(e, t) {
+ return On("disposeData");
+ }
+ write(e, t, n) {
+ return On("write");
+ }
+ move(e, t, n, s, r) {
+ return On("move");
+ }
+ memory() {
+ return On("memory");
+ }
+ floatPrecision() {
+ return On("floatPrecision");
+ }
+ epsilon() {
+ return this.floatPrecision() === 32 ? RT : DT;
+ }
+ dispose() {
+ return On("dispose");
+ }
+};
+function On(e) {
+ throw new Error(`'${e}' not yet implemented or not found in the registry. This kernel may not be supported by the tfjs backend you have chosen`);
+}
+function mw(e) {
+ let t = e.length, n = 0;
+ for (; t > 0; )
+ n = Math.random() * t | 0, t--, rd(e, t, n);
+}
+function FT(e, t) {
+ if (e.length !== t.length)
+ throw new Error(`Array sizes must match to be shuffled together. First array length was ${e.length}, second array length was ${t.length}.`);
+ let n = e.length, s = 0;
+ for (; n > 0; )
+ s = Math.random() * n | 0, n--, rd(e, n, s), rd(t, n, s);
+}
+function Bu(e, t, n) {
+ return Math.max(e, Math.min(t, n));
+}
+function OT(e) {
+ return e % 2 === 0 ?
e : e + 1; +} +function rd(e, t, n) { + let s = e[t]; + e[t] = e[n], e[n] = s; +} +function PT(e) { + let t = 0; + for (let n = 0; n < e.length; n++) + t += e[n]; + return t; +} +function zT(e, t) { + let n = Math.random(); + return t * n + (1 - n) * e; +} +function MT(e, t) { + let n = 0; + for (let s = 0; s < e.length; s++) { + let r = Number(e[s]) - Number(t[s]); + n += r * r; + } + return n; +} +function O(e, t) { + if (!e) + throw new Error(typeof t == "string" ? t : t()); +} +function on(e, t, n = "") { + O(Sr(e, t), () => n + ` Shapes ${e} and ${t} must match`); +} +function xa(e) { + O(e != null, () => "The input to the tensor constructor must be a non-null value."); +} +function na(e, t = [], n = false) { + if (t == null && (t = []), Array.isArray(e) || Zt(e) && !n) + for (let s = 0; s < e.length; ++s) + na(e[s], t, n); + else + t.push(e); + return t; +} +function pt(e) { + if (e.length === 0) + return 1; + let t = e[0]; + for (let n = 1; n < e.length; n++) + t *= e[n]; + return t; +} +function LT(e) { + return e.length === 0; +} +function Sr(e, t) { + if (e === t) + return true; + if (e == null || t == null || e.length !== t.length) + return false; + for (let n = 0; n < e.length; n++) + if (e[n] !== t[n]) + return false; + return true; +} +function ji(e) { + return e % 1 === 0; +} +function BT(e) { + if (Math.tanh != null) + return Math.tanh(e); + if (e === 1 / 0) + return 1; + if (e === -1 / 0) + return -1; + { + let t = Math.exp(2 * e); + return (t - 1) / (t + 1); + } +} +function VT(e) { + let t = Math.ceil(Math.sqrt(e)); + return [t, Math.ceil(e / t)]; +} +function WT(e) { + let t = new Uint32Array(e); + for (let n = 0; n < e; ++n) + t[n] = n; + return mw(t), t; +} +function Pu(e, t) { + return t <= e.length ? e : e + " ".repeat(t - e.length); +} +function UT(e, t = (s) => 0, n) { + return new Promise((s, r) => { + let a = 0, i = () => { + if (e()) { + s(); + return; + } + a++; + let o = t(a); + if (n != null && a >= n) { + r(); + return; + } + setTimeout(i, o); + }; + i(); + }); +} +function GT(e, t) { + let n = 1, s = -1; + for (let a = 0; a < e.length; ++a) + if (e[a] >= 0) + n *= e[a]; + else if (e[a] === -1) { + if (s !== -1) + throw Error(`Shapes can only have 1 implicit size. Found -1 at dim ${s} and dim ${a}`); + s = a; + } else if (e[a] < 0) + throw Error(`Shapes can not be < 0. Found ${e[a]} at dim ${a}`); + if (s === -1) { + if (t > 0 && t !== n) + throw Error(`Size(${t}) must match the product of shape ${e}`); + return e; + } + if (n === 0) + throw Error(`Cannot infer the missing size in [${e}] when there are 0 elements`); + if (t % n !== 0) + throw Error(`The implicit shape can't be a fractional number. Got ${t} / ${n}`); + let r = e.slice(); + return r[s] = t / n, r; +} +function ss(e, t) { + let n = t.length; + return e = e == null ? t.map((s, r) => r) : [].concat(e), O(e.every((s) => s >= -n && s < n), () => `All values in axis param must be in range [-${n}, ${n}) but got axis ${e}`), O(e.every((s) => ji(s)), () => `All values in axis param must be integers but got axis ${e}`), e.map((s) => s < 0 ? n + s : s); +} +function gw(e, t) { + let n = [], s = [], r = t != null && Array.isArray(t) && t.length === 0, a = t == null || r ? 
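+ // with axes given, only the size-1 dims listed (after sorting) are squeezed,
+ // and naming a non-1 dim throws; with no axes, every size-1 dim is dropped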
null : ss(t, e).sort(), i = 0; + for (let o = 0; o < e.length; ++o) { + if (a != null) { + if (a[i] === o && e[o] !== 1) + throw new Error(`Can't squeeze axis ${o} since its dim '${e[o]}' is not 1`); + (a[i] == null || a[i] > o) && e[o] === 1 && (n.push(e[o]), s.push(o)), a[i] <= o && i++; + } + e[o] !== 1 && (n.push(e[o]), s.push(o)); + } + return { newShape: n, keptDims: s }; +} +function bw(e, t) { + let n = null; + if (e == null || e === "float32") + n = new Float32Array(t); + else if (e === "int32") + n = new Int32Array(t); + else if (e === "bool") + n = new Uint8Array(t); + else + throw new Error(`Unknown data type ${e}`); + return n; +} +function yw(e, t) { + let n = null; + if (e == null || e === "float32") + n = new Float32Array(t); + else if (e === "int32") + n = new Int32Array(t); + else if (e === "bool") + n = new Uint8Array(t); + else if (e === "string") + n = new Array(t); + else + throw new Error(`Unknown data type ${e}`); + return n; +} +function vw(e, t) { + for (let n = 0; n < e.length; n++) { + let s = e[n]; + if (isNaN(s) || !isFinite(s)) + throw Error(`A tensor of type ${t} being uploaded contains ${s}.`); + } +} +function xw(e) { + return e === "bool" || e === "complex64" || e === "float32" || e === "int32" || e === "string"; +} +function HT(e, t) { + return !(t === "complex64" || t === "float32" && e !== "complex64" || t === "int32" && e !== "float32" && e !== "complex64" || t === "bool" && e === "bool"); +} +function Zt(e) { + return e instanceof Float32Array || e instanceof Int32Array || e instanceof Uint8Array || e instanceof Uint8ClampedArray; +} +function Of(e) { + if (e === "float32" || e === "int32") + return 4; + if (e === "complex64") + return 8; + if (e === "bool") + return 1; + throw new Error(`Unknown dtype ${e}`); +} +function ww(e) { + if (e == null) + return 0; + let t = 0; + return e.forEach((n) => t += n.length), t; +} +function lr(e) { + return typeof e == "string" || e instanceof String; +} +function kw(e) { + return typeof e == "boolean"; +} +function Iw(e) { + return typeof e == "number"; +} +function Pd(e) { + return Array.isArray(e) ? Pd(e[0]) : e instanceof Float32Array ? "float32" : e instanceof Int32Array || e instanceof Uint8Array || e instanceof Uint8ClampedArray ? "int32" : Iw(e) ? "float32" : lr(e) ? "string" : kw(e) ? "bool" : "float32"; +} +function gr(e) { + return !!(e && e.constructor && e.call && e.apply); +} +function ad(e, t) { + for (let n = t; n < e; ++n) + if (e % n === 0) + return n; + return e; +} +function no(e) { + let t = e.length; + if (t < 2) + return []; + let n = new Array(t - 1); + n[t - 2] = e[t - 1]; + for (let s = t - 3; s >= 0; --s) + n[s] = n[s + 1] * e[s + 1]; + return n; +} +function Sw(e, t, n, s = false) { + let r = new Array(); + if (t.length === 1) { + let a = t[0] * (s ? 2 : 1); + for (let i = 0; i < a; i++) + r[i] = n[e + i]; + } else { + let a = t[0], i = t.slice(1), o = i.reduce((u, c) => u * c) * (s ? 2 : 1); + for (let u = 0; u < a; u++) + r[u] = Sw(e + u * o, i, n, s); + } + return r; +} +function Wi(e, t, n = false) { + if (e.length === 0) + return t[0]; + let s = e.reduce((r, a) => r * a) * (n ? 2 : 1); + if (s === 0) + return []; + if (s !== t.length) + throw new Error(`[${e}] does not match the input size ${t.length}${n ? 
" for a complex tensor" : ""}.`); + return Sw(0, e, t, n); +} +function Vm(e, t) { + let n = zd(e, t); + for (let s = 0; s < n.length; s++) + n[s] = 1; + return n; +} +function zd(e, t) { + if (t == null || t === "float32" || t === "complex64") + return new Float32Array(e); + if (t === "int32") + return new Int32Array(e); + if (t === "bool") + return new Uint8Array(e); + throw new Error(`Unknown data type ${t}`); +} +function qT(e, t) { + let n = e.reduce((s, r) => s * r, 1); + if (t == null || t === "float32") + return Wi(e, new Float32Array(n)); + if (t === "int32") + return Wi(e, new Int32Array(n)); + if (t === "bool") + return Wi(e, new Uint8Array(n)); + throw new Error(`Unknown data type ${t}`); +} +function Wm(e) { + e.forEach((t) => { + O(Number.isInteger(t) && t >= 0, () => `Tensor must have a shape comprised of positive integers but got shape [${e}].`); + }); +} +function jT(e, t, n) { + if (t === 0) + return 0; + if (t === 1) + return e[0]; + let s = e[e.length - 1]; + for (let r = 0; r < e.length - 1; ++r) + s += n[r] * e[r]; + return s; +} +function KT(e, t, n) { + if (t === 0) + return []; + if (t === 1) + return [e]; + let s = new Array(t); + for (let r = 0; r < s.length - 1; ++r) + s[r] = Math.floor(e / n[r]), e -= s[r] * n[r]; + return s[s.length - 1] = e, s; +} +function Um(e) { + return e && e.then && typeof e.then == "function"; +} +var _v = "tfjsflags"; +var XT = class { + constructor(e) { + this.global = e, this.flags = {}, this.flagRegistry = {}, this.urlFlags = {}, this.getQueryParams = YT, this.populateURLFlags(); + } + setPlatform(e, t) { + this.platform != null && (X().getBool("IS_TEST") || X().getBool("PROD") || console.warn(`Platform ${this.platformName} has already been set. Overwriting the platform with ${e}.`)), this.platformName = e, this.platform = t; + } + registerFlag(e, t, n) { + if (this.flagRegistry[e] = { evaluationFn: t, setHook: n }, this.urlFlags[e] != null) { + let s = this.urlFlags[e]; + X().getBool("IS_TEST") || X().getBool("PROD") || console.warn(`Setting feature override from URL ${e}: ${s}.`), this.set(e, s); + } + } + async getAsync(e) { + return e in this.flags ? this.flags[e] : (this.flags[e] = await this.evaluateFlag(e), this.flags[e]); + } + get(e) { + if (e in this.flags) + return this.flags[e]; + let t = this.evaluateFlag(e); + if (Um(t)) + throw new Error(`Flag ${e} cannot be synchronously evaluated. 
Please use getAsync() instead.`); + return this.flags[e] = t, this.flags[e]; + } + getNumber(e) { + return this.get(e); + } + getBool(e) { + return this.get(e); + } + getFlags() { + return this.flags; + } + get features() { + return this.flags; + } + set(e, t) { + if (this.flagRegistry[e] == null) + throw new Error(`Cannot set flag ${e} as it has not been registered.`); + this.flags[e] = t, this.flagRegistry[e].setHook != null && this.flagRegistry[e].setHook(t); + } + evaluateFlag(e) { + if (this.flagRegistry[e] == null) + throw new Error(`Cannot evaluate flag '${e}': no evaluation function found.`); + return this.flagRegistry[e].evaluationFn(); + } + setFlags(e) { + this.flags = Object.assign({}, e); + } + reset() { + this.flags = {}, this.urlFlags = {}, this.populateURLFlags(); + } + populateURLFlags() { + if (typeof this.global == "undefined" || typeof this.global.location == "undefined" || typeof this.global.location.search == "undefined") + return; + let e = this.getQueryParams(this.global.location.search); + _v in e && e[_v].split(",").forEach((n) => { + let [s, r] = n.split(":"); + this.urlFlags[s] = ZT(s, r); + }); + } +}; +function YT(e) { + let t = {}; + return e.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (n, ...s) => (QT(t, s[0], s[1]), s.join("="))), t; +} +function QT(e, t, n) { + e[decodeURIComponent(t)] = decodeURIComponent(n || ""); +} +function ZT(e, t) { + if (t = t.toLowerCase(), t === "true" || t === "false") + return t === "true"; + if (`${+t}` === t) + return +t; + throw new Error(`Could not parse value flag value ${t} for flag ${e}.`); +} +function X() { + return Cw; +} +var Cw = null; +function JT(e) { + Cw = e; +} +var vf; +function Nw() { + if (vf == null) { + let e; + if (typeof window != "undefined") + e = window; + else if (typeof global != "undefined") + e = global; + else if (typeof process != "undefined") + e = process; + else if (typeof self != "undefined") + e = self; + else + throw new Error("Could not find a global object"); + vf = e; + } + return vf; +} +function e$() { + let e = Nw(); + return e._tfGlobals == null && (e._tfGlobals = /* @__PURE__ */ new Map()), e._tfGlobals; +} +function Gm(e, t) { + let n = e$(); + if (n.has(e)) + return n.get(e); + { + let s = t(); + return n.set(e, s), n.get(e); + } +} +var so = "Abs"; +var nl = "Acos"; +var sl = "Acosh"; +var Cr = "Add"; +var wa = "AddN"; +var rl = "All"; +var al = "Any"; +var ka = "ArgMax"; +var il = "ArgMin"; +var ol = "Asin"; +var ul = "Asinh"; +var ll = "Atan"; +var cl = "Atanh"; +var dl = "Atan2"; +var Ia = "AvgPool"; +var Hm = "AvgPoolGrad"; +var Md = "AvgPool3D"; +var qm = "AvgPool3DGrad"; +var Sa = "BatchMatMul"; +var ro = "BatchToSpaceND"; +var jm = "Bincount"; +var t$ = "BroadcastTo"; +var Km = "BroadcastArgs"; +var Ca = "Cast"; +var Na = "Ceil"; +var Nr = "ClipByValue"; +var Ld = "Complex"; +var Bd = "ComplexAbs"; +var ao = "Concat"; +var Ta = "Conv2D"; +var Xm = "Conv2DBackpropFilter"; +var $a = "Conv2DBackpropInput"; +var Vd = "Conv3D"; +var Ym = "Conv3DBackpropFilterV2"; +var Qm = "Conv3DBackpropInputV2"; +var Aa = "Cos"; +var Ea = "Cosh"; +var io = "Cumsum"; +var oo = "CropAndResize"; +var Zm = "DenseBincount"; +var uo = "DepthToSpace"; +var _a = "DepthwiseConv2dNative"; +var Jm = "DepthwiseConv2dNativeBackpropFilter"; +var eg = "DepthwiseConv2dNativeBackpropInput"; +var tg = "Diag"; +var Wd = "Dilation2D"; +var Pf = "Dilation2DBackpropInput"; +var zf = "Dilation2DBackpropFilter"; +var Ra = "RealDiv"; +var Ud = "Einsum"; +var Da = "Elu"; +var ng = "EluGrad"; +var pl = "Erf"; +var lo = 
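+ // string constants naming each kernel; the registries below key their
+ // entries as "backendName_kernelName" (see gg())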
"Equal"; +var Fa = "Exp"; +var co = "ExpandDims"; +var po = "Expm1"; +var sg = "FFT"; +var hl = "Fill"; +var ho = "FlipLeftRight"; +var Oa = "Floor"; +var Pa = "FloorDiv"; +var za = "FusedBatchNorm"; +var fo = "GatherV2"; +var mo = "GatherNd"; +var go = "Greater"; +var Ma = "GreaterEqual"; +var La = "Identity"; +var rg = "IFFT"; +var Gd = "Imag"; +var fl = "IsFinite"; +var ml = "IsInf"; +var gl = "IsNan"; +var Ba = "LeakyRelu"; +var bo = "Less"; +var yo = "LessEqual"; +var ag = "LinSpace"; +var Va = "Log"; +var bl = "Log1p"; +var vo = "LogicalAnd"; +var yl = "LogicalNot"; +var Hd = "LogicalOr"; +var n$ = "LogSoftmax"; +var qd = "LRN"; +var ig = "LRNGrad"; +var Wa = "Max"; +var Ua = "Maximum"; +var Ga = "MaxPool"; +var og = "MaxPoolGrad"; +var jd = "MaxPool3D"; +var ug = "MaxPool3DGrad"; +var lg = "MaxPoolWithArgmax"; +var Ha = "Mean"; +var qa = "Min"; +var ja = "Minimum"; +var Ka = "MirrorPad"; +var vl = "Mod"; +var cg = "Multinomial"; +var Xa = "Multiply"; +var xo = "Neg"; +var wo = "NotEqual"; +var ko = "NonMaxSuppressionV3"; +var xl = "NonMaxSuppressionV4"; +var Io = "NonMaxSuppressionV5"; +var So = "OnesLike"; +var Co = "OneHot"; +var No = "Pack"; +var Ya = "PadV2"; +var qce = "Pool"; +var Qa = "Pow"; +var Za = "Prelu"; +var To = "Prod"; +var wl = "Range"; +var Kd = "Real"; +var kl = "Reciprocal"; +var Ja = "Relu"; +var $o = "Reshape"; +var Il = "ResizeNearestNeighbor"; +var dg = "ResizeNearestNeighborGrad"; +var ei = "ResizeBilinear"; +var pg = "ResizeBilinearGrad"; +var ti = "Relu6"; +var Ao = "Reverse"; +var Eo = "Round"; +var ni = "Rsqrt"; +var _o = "ScatterNd"; +var Ro = "Select"; +var Sl = "Selu"; +var Do = "Slice"; +var si = "Sin"; +var Fo = "Sinh"; +var Cl = "Sign"; +var ri = "Sigmoid"; +var Nl = "Softplus"; +var ai = "Sqrt"; +var ii = "Sum"; +var Oo = "SpaceToBatchND"; +var Po = "SplitV"; +var oi = "Softmax"; +var Xd = "SparseFillEmptyRows"; +var Tl = "SparseReshape"; +var Yd = "SparseSegmentMean"; +var Qd = "SparseSegmentSum"; +var Zd = "SparseToDense"; +var ui = "SquaredDifference"; +var $l = "Square"; +var zo = "StridedSlice"; +var Jd = "StringNGrams"; +var hg = "StringSplit"; +var fg = "StringToHashBucketFast"; +var li = "Sub"; +var Mo = "Tan"; +var ci = "Tanh"; +var Tr = "Tile"; +var Lo = "TopK"; +var Bo = "Transform"; +var di = "Transpose"; +var mg = "Unique"; +var Vo = "Unpack"; +var ep = "UnsortedSegmentSum"; +var Wo = "ZerosLike"; +var pi = "Step"; +var id = "FromPixels"; +var Uo = "RotateWithOffset"; +var sa = "_FusedMatMul"; +var ra = "FusedConv2D"; +var aa = "FusedDepthwiseConv2D"; +function ur(...e) { + X().getBool("IS_TEST") || X().getBool("PROD") || console.warn(...e); +} +function s$(...e) { + X().getBool("IS_TEST") || X().getBool("PROD") || console.log(...e); +} +var Ki = Gm("kernelRegistry", () => /* @__PURE__ */ new Map()); +var Vu = Gm("gradRegistry", () => /* @__PURE__ */ new Map()); +function Mf(e, t) { + let n = gg(e, t); + return Ki.get(n); +} +function Rv(e) { + return Vu.get(e); +} +function Lf(e) { + let t = Ki.entries(), n = []; + for (; ; ) { + let { done: s, value: r } = t.next(); + if (s) + break; + let [a, i] = r, [o] = a.split("_"); + o === e && n.push(i); + } + return n; +} +function Al(e) { + let { kernelName: t, backendName: n } = e, s = gg(t, n); + Ki.has(s) && ur(`The kernel '${t}' for backend '${n}' is already registered`), Ki.set(s, e); +} +function r$(e) { + let { kernelName: t } = e; + Vu.has(t) && X().getBool("DEBUG") && ur(`Overriding the gradient for '${t}'`), Vu.set(t, e); +} +function jce(e, t) { + let n = gg(e, t); + if 
(!Ki.has(n)) + throw new Error(`The kernel '${e}' for backend '${t}' is not registered`); + Ki.delete(n); +} +function Kce(e) { + if (!Vu.has(e)) + throw new Error(`The gradient '${e}' for backend is not registered`); + Vu.delete(e); +} +function Xce(e, t) { + Lf(e).forEach((s) => { + let r = Object.assign({}, s, { backendName: t }); + Al(r); + }); +} +function gg(e, t) { + return `${t}_${e}`; +} +var w = {}; +Ae(w, { arraysEqual: () => Sr, assert: () => O, assertNonNegativeIntegerDimensions: () => Wm, assertNonNull: () => xa, assertShapesMatch: () => on, bytesFromStringArray: () => ww, bytesPerElement: () => Of, checkConversionForErrors: () => vw, clamp: () => Bu, computeStrides: () => no, createScalarValue: () => c$, createShuffledIndices: () => WT, decodeString: () => od, distSquared: () => MT, encodeString: () => _l, fetch: () => p$, fingerPrint64: () => l$, flatten: () => na, getArrayFromDType: () => yw, getTypedArrayFromDType: () => bw, hasEncodingLoss: () => HT, hexToLong: () => El, indexToLoc: () => KT, inferDtype: () => Pd, inferFromImplicitShape: () => GT, isBoolean: () => kw, isFunction: () => gr, isInt: () => ji, isNumber: () => Iw, isPromise: () => Um, isScalarShape: () => LT, isString: () => lr, isTypedArray: () => Zt, isValidDtype: () => xw, locToIndex: () => jT, makeOnesTypedArray: () => Vm, makeZerosNestedTypedArray: () => qT, makeZerosTypedArray: () => zd, nearestDivisor: () => ad, nearestLargerEven: () => OT, now: () => Wu, parseAxisParam: () => ss, randUniform: () => zT, repeatedTry: () => UT, rightPad: () => Pu, shuffle: () => mw, shuffleCombo: () => FT, sizeFromShape: () => pt, sizeToSquarishShape: () => VT, squeezeShape: () => gw, sum: () => PT, swap: () => rd, tanh: () => BT, toNestedArray: () => Wi, toTypedArray: () => tp }); +var Dv = va(gT()); +var Hr = Dv.default || Dv; +function El(e) { + return Hr.fromString(e, true, 16); +} +var Tw = El("c3a5c85c97cb3127"); +var Gr = El("b492b66fbe98f273"); +var tn = El("9ae16a3b2f90404f"); +function Bf(e) { + return e.xor(e.shru(47)); +} +function $w(e, t, n) { + let s = e.slice(t, t + n); + return Hr.fromBytes(Array.from(s), true, true); +} +function lt(e, t) { + return $w(e, t, 8); +} +function Fv(e, t) { + return $w(e, t, 4); +} +function Lt(e, t) { + return t === 0 ? 
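+ // 64-bit rotate-right built from Long ops: (e >>> t) | (e << (64 - t))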
e : e.shru(t).or(e.shl(64 - t)); +} +function dr(e, t, n = El("9ddfea08eb382d69")) { + let s = e.xor(t).mul(n); + s = s.xor(s.shru(47)); + let r = t.xor(s).mul(n); + return r = r.xor(r.shru(47)), r = r.mul(n), r; +} +function a$(e, t, n, s, r, a) { + r = r.add(e), a = Lt(a.add(r).add(s), 21); + let i = r; + return r = r.add(t), r = r.add(n), a = a.add(Lt(r, 44)), [r.add(s), a.add(i)]; +} +function Dc(e, t, n, s) { + return a$(lt(e, t), lt(e, t + 8), lt(e, t + 16), lt(e, t + 24), n, s); +} +function i$(e, t = e.length) { + if (t >= 8) { + let n = tn.add(t * 2), s = lt(e, 0).add(tn), r = lt(e, t - 8), a = Lt(r, 37).mul(n).add(s), i = Lt(s, 25).add(r).mul(n); + return dr(a, i, n); + } + if (t >= 4) { + let n = tn.add(t * 2), s = Fv(e, 0); + return dr(s.shl(3).add(t), Fv(e, t - 4), n); + } + if (t > 0) { + let n = e[0], s = e[t >> 1], r = e[t - 1], a = n + (s << 8), i = t + (r << 2); + return Bf(tn.mul(a).xor(Tw.mul(i))).mul(tn); + } + return tn; +} +function o$(e, t = e.length) { + let n = tn.add(t * 2), s = lt(e, 0).mul(Gr), r = lt(e, 8), a = lt(e, t - 8).mul(n), i = lt(e, t - 16).mul(tn); + return dr(Lt(s.add(r), 43).add(Lt(a, 30)).add(i), s.add(Lt(r.add(tn), 18)).add(a), n); +} +function u$(e, t = e.length) { + let n = tn.add(t * 2), s = lt(e, 0).mul(tn), r = lt(e, 8), a = lt(e, t - 8).mul(n), i = lt(e, t - 16).mul(tn), o = Lt(s.add(r), 43).add(Lt(a, 30)).add(i), u = dr(o, s.add(Lt(r.add(tn), 18)).add(a), n), c = lt(e, 16).mul(n), l = lt(e, 24), d = o.add(lt(e, t - 32)).mul(n), p = u.add(lt(e, t - 24)).mul(n); + return dr(Lt(c.add(l), 43).add(Lt(d, 30)).add(p), c.add(Lt(l.add(s), 18)).add(d), n); +} +function l$(e, t = e.length) { + let n = Hr.fromNumber(81, true); + if (t <= 32) + return t <= 16 ? i$(e, t) : o$(e, t); + if (t <= 64) + return u$(e, t); + let s = n, r = n.mul(Gr).add(113), a = Bf(r.mul(tn).add(113)).mul(tn), i = [Hr.UZERO, Hr.UZERO], o = [Hr.UZERO, Hr.UZERO]; + s = s.mul(tn).add(lt(e, 0)); + let u = 0, c = (t - 1 >> 6) * 64, l = c + (t - 1 & 63) - 63; + do + s = Lt(s.add(r).add(i[0]).add(lt(e, u + 8)), 37).mul(Gr), r = Lt(r.add(i[1]).add(lt(e, u + 48)), 42).mul(Gr), s = s.xor(o[1]), r = r.add(i[0]).add(lt(e, u + 40)), a = Lt(a.add(o[0]), 33).mul(Gr), i = Dc(e, u, i[1].mul(Gr), s.add(o[0])), o = Dc(e, u + 32, a.add(o[1]), r.add(lt(e, u + 16))), [a, s] = [s, a], u += 64; + while (u !== c); + let d = Gr.add(a.and(255).shl(1)); + return u = l, o[0] = o[0].add(t - 1 & 63), i[0] = i[0].add(o[0]), o[0] = o[0].add(i[0]), s = Lt(s.add(r).add(i[0]).add(lt(e, u + 8)), 37).mul(d), r = Lt(r.add(i[1]).add(lt(e, u + 48)), 42).mul(d), s = s.xor(o[1].mul(9)), r = r.add(i[0].mul(9).add(lt(e, u + 40))), a = Lt(a.add(o[0]), 33).mul(d), i = Dc(e, u, i[1].mul(d), s.add(o[0])), o = Dc(e, u + 32, a.add(o[1]), r.add(lt(e, u + 16))), [a, s] = [s, a], dr(dr(i[0], o[0], d).add(Bf(r).mul(Tw)).add(a), dr(i[1], o[1], d).add(s), d); +} +function c$(e, t) { + return t === "string" ? 
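+  // Scalar construction: strings are utf-8 encoded via _l (exported as
+  // encodeString above); all other dtypes go through the typed-array
+  // conversion tp (toTypedArray) defined just below.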
_l(e) : tp([e], t); +} +function d$(e, t) { + return e instanceof Float32Array && t === "float32" || e instanceof Int32Array && t === "int32" || e instanceof Uint8Array && t === "bool"; +} +function tp(e, t) { + if (t === "string") + throw new Error("Cannot convert a string[] to a TypedArray"); + if (Array.isArray(e) && (e = na(e)), X().getBool("DEBUG") && vw(e, t), d$(e, t)) + return e; + if (t == null || t === "float32" || t === "complex64") + return new Float32Array(e); + if (t === "int32") + return new Int32Array(e); + if (t === "bool") { + let n = new Uint8Array(e.length); + for (let s = 0; s < n.length; ++s) + Math.round(e[s]) !== 0 && (n[s] = 1); + return n; + } else + throw new Error(`Unknown data type ${t}`); +} +function Wu() { + return X().platform.now(); +} +function p$(e, t) { + return X().platform.fetch(e, t); +} +function _l(e, t = "utf-8") { + return t = t || "utf-8", X().platform.encode(e, t); +} +function od(e, t = "utf-8") { + return t = t || "utf-8", X().platform.decode(e, t); +} +var h$ = class { + constructor(e, t) { + this.backendTimer = e, this.logger = t, t == null && (this.logger = new m$()); + } + profileKernel(e, t, n) { + let s, r = () => { + s = n(); + }, a, i = Wu(); + if (this.backendTimer.timerAvailable()) + a = this.backendTimer.time(r); + else { + r(); + for (let u of s) + u.dataSync(); + a = Promise.resolve({ kernelMs: Wu() - i }); + } + if (X().getBool("CHECK_COMPUTATION_FOR_ERRORS")) + for (let u = 0; u < s.length; u++) { + let c = s[u]; + c.data().then((l) => { + f$(l, c.dtype, e); + }); + } + return { kernelName: e, outputs: s, inputs: t, timeMs: a.then((u) => u.kernelMs), extraInfo: a.then((u) => u.getExtraProfileInfo != null ? u.getExtraProfileInfo() : "") }; + } + logKernelProfile(e) { + let { kernelName: t, outputs: n, timeMs: s, inputs: r, extraInfo: a } = e; + n.forEach((i) => { + Promise.all([i.data(), s, a]).then((o) => { + this.logger.logKernelProfile(t, i, o[0], o[1], r, o[2]); + }); + }); + } +}; +function f$(e, t, n) { + if (t !== "float32") + return false; + for (let s = 0; s < e.length; s++) { + let r = e[s]; + if (isNaN(r) || !isFinite(r)) + return console.warn(`Found ${r} in the result of '${n}'`), true; + } + return false; +} +var m$ = class { + logKernelProfile(e, t, n, s, r, a) { + let i = typeof s == "number" ? Pu(`${s}ms`, 9) : s.error, o = Pu(e, 25), u = t.rank, c = t.size, l = Pu(t.shape.toString(), 14), d = ""; + for (let p in r) { + let h = r[p]; + if (h != null) { + let f = h.shape || t.shape, m = f.length; + d += `${p}: ${m}D ${m > 0 ? 
f : ""} `; + } + } + console.log(`%c${o} %c${i} %c${u}D ${l} %c${c} %c${d} %c${a}`, "font-weight:bold", "color:red", "color:blue", "color: orange", "color: green", "color: steelblue"); + } +}; +function g$(e, t, n) { + let s = {}, r = {}; + for (let u = 0; u < t.length; u++) + s[t[u].id] = true; + for (let u = 0; u < e.length; u++) { + let c = e[u], l = c.inputs; + for (let d in l) { + let p = l[d], h = false; + for (let f = 0; f < t.length; f++) + if (s[p.id]) { + c.outputs.forEach((m) => s[m.id] = true), h = true, r[c.id] = true; + break; + } + if (h) + break; + } + } + let a = {}; + a[n.id] = true; + let i = {}; + for (let u = e.length - 1; u >= 0; u--) { + let c = e[u], l = c.inputs; + for (let d = 0; d < c.outputs.length; d++) + if (a[c.outputs[d].id]) { + for (let p in l) + a[l[p].id] = true, i[c.id] = true; + break; + } + } + let o = []; + for (let u = 0; u < e.length; u++) { + let c = e[u]; + if (r[c.id] && i[c.id]) { + let l = {}; + for (let p in c.inputs) { + let h = c.inputs[p]; + s[h.id] && (l[p] = h); + } + let d = Object.assign({}, c); + d.inputs = l, d.outputs = c.outputs, o.push(d); + } + } + return o; +} +function b$(e, t, n, s) { + for (let r = t.length - 1; r >= 0; r--) { + let a = t[r], i = []; + if (a.outputs.forEach((u) => { + let c = e[u.id]; + c != null ? i.push(c) : i.push(null); + }), a.gradient == null) + throw new Error(`Cannot compute gradient: gradient function not found for ${a.kernelName}.`); + let o = a.gradient(i); + for (let u in a.inputs) { + if (!(u in o)) + throw new Error(`Cannot backprop through input ${u}. Available gradients found: ${Object.keys(o)}.`); + let c = n(() => o[u]()); + if (c.dtype !== "float32") + throw new Error(`Error in gradient for op ${a.kernelName}. The gradient of input ${u} must have 'float32' dtype, but has '${c.dtype}'`); + let l = a.inputs[u]; + if (!Sr(c.shape, l.shape)) + throw new Error(`Error in gradient for op ${a.kernelName}. The gradient of input '${u}' has shape '${c.shape}', which does not match the shape of the input '${l.shape}'`); + if (e[l.id] == null) + e[l.id] = c; + else { + let d = e[l.id]; + e[l.id] = s(d, c), d.dispose(); + } + } + } +} +var Ov = 20; +var ku = 3; +var xf = 7; +function y$(e, t, n, s) { + let r = no(t), a = v$(e, t, n, r), i = t.length, o = qc(e, t, n, r, a), u = ["Tensor"]; + return s && (u.push(` dtype: ${n}`), u.push(` rank: ${i}`), u.push(` shape: [${t}]`), u.push(" values:")), u.push(o.map((c) => " " + c).join(` +`)), u.join(` +`); +} +function v$(e, t, n, s) { + let r = pt(t), a = s[s.length - 1], i = new Array(a).fill(0), o = t.length, u = n === "complex64" ? Tu(e) : e; + if (o > 1) + for (let c = 0; c < r / a; c++) { + let l = c * a; + for (let d = 0; d < a; d++) + i[d] = Math.max(i[d], Nu(u[l + d], 0, n).length); + } + return i; +} +function Nu(e, t, n) { + let s; + return Array.isArray(e) ? s = `${parseFloat(e[0].toFixed(xf))} + ${parseFloat(e[1].toFixed(xf))}j` : lr(e) ? s = `'${e}'` : n === "bool" ? s = Aw(e) : s = parseFloat(e.toFixed(xf)).toString(), Pu(s, t); +} +function Aw(e) { + return e === 0 ? "false" : "true"; +} +function qc(e, t, n, s, r, a = true) { + let i = n === "complex64" ? 2 : 1, o = t[0], u = t.length; + if (u === 0) { + if (n === "complex64") { + let m = Tu(e); + return [Nu(m[0], 0, n)]; + } + return n === "bool" ? 
[Aw(e[0])] : [e[0].toString()]; + } + if (u === 1) { + if (o > Ov) { + let g = ku * i, b = Array.from(e.slice(0, g)), y = Array.from(e.slice((o - ku) * i, o * i)); + return n === "complex64" && (b = Tu(b), y = Tu(y)), ["[" + b.map((v, x) => Nu(v, r[x], n)).join(", ") + ", ..., " + y.map((v, x) => Nu(v, r[o - ku + x], n)).join(", ") + "]"]; + } + let m = n === "complex64" ? Tu(e) : Array.from(e); + return ["[" + m.map((g, b) => Nu(g, r[b], n)).join(", ") + "]"]; + } + let c = t.slice(1), l = s.slice(1), d = s[0] * i, p = []; + if (o > Ov) { + for (let m = 0; m < ku; m++) { + let g = m * d, b = g + d; + p.push(...qc(e.slice(g, b), c, n, l, r, false)); + } + p.push("..."); + for (let m = o - ku; m < o; m++) { + let g = m * d, b = g + d; + p.push(...qc(e.slice(g, b), c, n, l, r, m === o - 1)); + } + } else + for (let m = 0; m < o; m++) { + let g = m * d, b = g + d; + p.push(...qc(e.slice(g, b), c, n, l, r, m === o - 1)); + } + let h = u === 2 ? "," : ""; + p[0] = "[" + p[0] + h; + for (let m = 1; m < p.length - 1; m++) + p[m] = " " + p[m] + h; + let f = `, +`; + for (let m = 2; m < u; m++) + f += ` +`; + return p[p.length - 1] = " " + p[p.length - 1] + "]" + (a ? "" : f), p; +} +function Tu(e) { + let t = []; + for (let n = 0; n < e.length; n += 2) + t.push([e[n], e[n + 1]]); + return t; +} +var Vt = class { + constructor(e, t, n) { + if (this.dtype = t, this.shape = e.slice(), this.size = pt(e), n != null) { + let s = n.length; + O(s === this.size, () => `Length of values '${s}' does not match the size inferred by the shape '${this.size}'.`); + } + if (t === "complex64") + throw new Error("complex64 dtype TensorBuffers are not supported. Please create a TensorBuffer for the real and imaginary parts separately and call tf.complex(real, imag)."); + this.values = n || yw(t, this.size), this.strides = no(e); + } + set(e, ...t) { + t.length === 0 && (t = [0]), O(t.length === this.rank, () => `The number of provided coordinates (${t.length}) must match the rank (${this.rank})`); + let n = this.locToIndex(t); + this.values[n] = e; + } + get(...e) { + e.length === 0 && (e = [0]); + let t = 0; + for (let s of e) { + if (s < 0 || s >= this.shape[t]) { + let r = `Requested out of range element at ${e}. Buffer shape=${this.shape}`; + throw new Error(r); + } + t++; + } + let n = e[e.length - 1]; + for (let s = 0; s < e.length - 1; ++s) + n += this.strides[s] * e[s]; + return this.values[n]; + } + locToIndex(e) { + if (this.rank === 0) + return 0; + if (this.rank === 1) + return e[0]; + let t = e[e.length - 1]; + for (let n = 0; n < e.length - 1; ++n) + t += this.strides[n] * e[n]; + return t; + } + indexToLoc(e) { + if (this.rank === 0) + return []; + if (this.rank === 1) + return [e]; + let t = new Array(this.shape.length); + for (let n = 0; n < t.length - 1; ++n) + t[n] = Math.floor(e / this.strides[n]), e -= t[n] * this.strides[n]; + return t[t.length - 1] = e, t; + } + get rank() { + return this.shape.length; + } + toTensor() { + return is().makeTensor(this.values, this.shape, this.dtype); + } +}; +var is = null; +var Mi = null; +var x$ = null; +function w$(e) { + is = e; +} +function k$(e) { + Mi = e; +} +function I$(e) { + x$ = e; +} +var tt = class { + constructor(e, t, n, s) { + this.kept = false, this.isDisposedInternal = false, this.shape = e.slice(), this.dtype = t || "float32", this.size = pt(e), this.strides = no(e), this.dataId = n, this.id = s, this.rankType = this.rank < 5 ? 
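+    // Ranks 0 through 4 get a numeric rank type; any tensor of rank 5 or
+    // above is collapsed into the "higher" bucket.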
this.rank.toString() : "higher"; + } + get rank() { + return this.shape.length; + } + async buffer() { + let e = await this.data(); + return Mi.buffer(this.shape, this.dtype, e); + } + bufferSync() { + return Mi.buffer(this.shape, this.dtype, this.dataSync()); + } + async array() { + let e = await this.data(); + return Wi(this.shape, e, this.dtype === "complex64"); + } + arraySync() { + return Wi(this.shape, this.dataSync(), this.dtype === "complex64"); + } + async data() { + this.throwIfDisposed(); + let e = is().read(this.dataId); + if (this.dtype === "string") { + let t = await e; + try { + return t.map((n) => od(n)); + } catch (n) { + throw new Error("Failed to decode the string bytes into utf-8. To get the original bytes, call tensor.bytes()."); + } + } + return e; + } + dataToGPU(e) { + return this.throwIfDisposed(), is().readToGPU(this.dataId, e); + } + dataSync() { + this.throwIfDisposed(); + let e = is().readSync(this.dataId); + if (this.dtype === "string") + try { + return e.map((t) => od(t)); + } catch (t) { + throw new Error("Failed to decode the string bytes into utf-8. To get the original bytes, call tensor.bytes()."); + } + return e; + } + async bytes() { + this.throwIfDisposed(); + let e = await is().read(this.dataId); + return this.dtype === "string" ? e : new Uint8Array(e.buffer); + } + dispose() { + this.isDisposed || (is().disposeTensor(this), this.isDisposedInternal = true); + } + get isDisposed() { + return this.isDisposedInternal; + } + throwIfDisposed() { + if (this.isDisposed) + throw new Error("Tensor is disposed."); + } + print(e = false) { + return Mi.print(this, e); + } + clone() { + return this.throwIfDisposed(), Mi.clone(this); + } + toString(e = false) { + let t = this.dataSync(); + return y$(t, this.shape, this.dtype, e); + } + cast(e) { + return this.throwIfDisposed(), Mi.cast(this, e); + } + variable(e = true, t, n) { + return this.throwIfDisposed(), is().makeVariable(this, e, t, n); + } +}; +Object.defineProperty(tt, Symbol.hasInstance, { value: (e) => !!e && e.data != null && e.dataSync != null && e.throwIfDisposed != null }); +function S$() { + return Gm("Tensor", () => tt); +} +S$(); +var ud = class extends tt { + constructor(e, t, n, s) { + super(e.shape, e.dtype, e.dataId, s); + this.trainable = t, this.name = n; + } + assign(e) { + if (e.dtype !== this.dtype) + throw new Error(`dtype of the new value (${e.dtype}) and previous value (${this.dtype}) must match`); + if (!Sr(e.shape, this.shape)) + throw new Error(`shape of the new value (${e.shape}) and previous value (${this.shape}) must match`); + is().disposeTensor(this), this.dataId = e.dataId, is().incRef(this, null); + } + dispose() { + is().disposeVariable(this), this.isDisposedInternal = true; + } +}; +Object.defineProperty(ud, Symbol.hasInstance, { value: (e) => e instanceof tt && e.assign != null && e.assign instanceof Function }); +var Ts = {}; +Ae(Ts, { assertTypesMatch: () => Fw, getTensorsInContainer: () => bg, isTensorInList: () => T$, makeTypesMatch: () => vt }); +var C$ = ((e) => (e.R0 = "R0", e.R1 = "R1", e.R2 = "R2", e.R3 = "R3", e.R4 = "R4", e.R5 = "R5", e.R6 = "R6", e))(C$ || {}); +var Ew = ((e) => (e.float32 = "float32", e.int32 = "int32", e.bool = "int32", e.complex64 = "complex64", e))(Ew || {}); +var _w = ((e) => (e.float32 = "float32", e.int32 = "int32", e.bool = "bool", e.complex64 = "complex64", e))(_w || {}); +var Rw = ((e) => (e.float32 = "float32", e.int32 = "float32", e.bool = "float32", e.complex64 = "complex64", e))(Rw || {}); +var Dw = ((e) => (e.float32 = 
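+// Upcast table for complex64: combining any dtype with complex64 yields
+// complex64. N$ below indexes these tables by the first operand's dtype,
+// so e.g. vn("int32", "float32") resolves to "float32".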
"complex64", e.int32 = "complex64", e.bool = "complex64", e.complex64 = "complex64", e))(Dw || {}); +var N$ = { float32: Rw, int32: Ew, bool: _w, complex64: Dw }; +function vn(e, t) { + if (e === "string" || t === "string") { + if (e === "string" && t === "string") + return "string"; + throw new Error(`Can not upcast ${e} with ${t}`); + } + return N$[e][t]; +} +function np(e) { + return vn(e, "int32"); +} +function vt(e, t) { + if (e.dtype === t.dtype) + return [e, t]; + let n = vn(e.dtype, t.dtype); + return [e.cast(n), t.cast(n)]; +} +function Fw(e, t) { + O(e.dtype === t.dtype, () => `The dtypes of the first(${e.dtype}) and second(${t.dtype}) input must match`); +} +function T$(e, t) { + return t.some((n) => n.id === e.id); +} +function bg(e) { + let t = []; + return Ow(e, t, /* @__PURE__ */ new Set()), t; +} +function Ow(e, t, n) { + if (e == null) + return; + if (e instanceof tt) { + t.push(e); + return; + } + if (!$$(e)) + return; + let s = e; + for (let r in s) { + let a = s[r]; + n.has(a) || (n.add(a), Ow(a, t, n)); + } +} +function $$(e) { + return Array.isArray(e) || typeof e == "object"; +} +function wf(e) { + return e.kernelName != null; +} +var Pv = class { + constructor() { + this.registeredVariables = {}, this.nextTapeNodeId = 0, this.numBytes = 0, this.numTensors = 0, this.numStringTensors = 0, this.numDataBuffers = 0, this.gradientDepth = 0, this.kernelDepth = 0, this.scopeStack = [], this.numDataMovesStack = [], this.nextScopeId = 0, this.tensorInfo = /* @__PURE__ */ new WeakMap(), this.profiling = false, this.activeProfile = { newBytes: 0, newTensors: 0, peakBytes: 0, kernels: [], result: null, get kernelNames() { + return Array.from(new Set(this.kernels.map((e) => e.name))); + } }; + } + dispose() { + for (let e in this.registeredVariables) + this.registeredVariables[e].dispose(); + } +}; +var Vf = class { + constructor(e) { + this.ENV = e, this.registry = {}, this.registryFactory = {}, this.pendingBackendInitId = 0, this.state = new Pv(); + } + async ready() { + if (this.pendingBackendInit != null) + return this.pendingBackendInit.then(() => { + }); + if (this.backendInstance != null) + return; + let e = this.getSortedBackends(); + for (let t = 0; t < e.length; t++) { + let n = e[t]; + if (await this.initializeBackend(n).success) { + await this.setBackend(n); + return; + } + } + throw new Error("Could not initialize any backends, all backend initializations failed."); + } + get backend() { + if (this.pendingBackendInit != null) + throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make sure to await tf.ready() or await tf.setBackend() before calling other methods`); + if (this.backendInstance == null) { + let { name: e, asyncInit: t } = this.initializeBackendsAndReturnBest(); + if (t) + throw new Error(`The highest priority backend '${e}' has not yet been initialized. Make sure to await tf.ready() or await tf.setBackend() before calling other methods`); + this.setBackend(e); + } + return this.backendInstance; + } + backendNames() { + return Object.keys(this.registryFactory); + } + findBackend(e) { + if (!(e in this.registry)) + if (e in this.registryFactory) { + let { asyncInit: t } = this.initializeBackend(e); + if (t) + return null; + } else + return null; + return this.registry[e]; + } + findBackendFactory(e) { + return e in this.registryFactory ? this.registryFactory[e].factory : null; + } + registerBackend(e, t, n = 1) { + return e in this.registryFactory ? (ur(`${e} backend was already registered. 
Reusing existing backend factory.`), false) : (this.registryFactory[e] = { factory: t, priority: n }, true); + } + async setBackend(e) { + if (this.registryFactory[e] == null) + throw new Error(`Backend name '${e}' not found in registry`); + if (this.backendName = e, this.registry[e] == null) { + this.backendInstance = null; + let { success: t, asyncInit: n } = this.initializeBackend(e); + if (!(n ? await t : t)) + return false; + } + return this.backendInstance = this.registry[e], this.setupRegisteredKernels(), this.profiler = new h$(this.backendInstance), true; + } + setupRegisteredKernels() { + Lf(this.backendName).forEach((t) => { + t.setupFunc != null && t.setupFunc(this.backendInstance); + }); + } + disposeRegisteredKernels(e) { + Lf(e).forEach((n) => { + n.disposeFunc != null && n.disposeFunc(this.registry[e]); + }); + } + initializeBackend(e) { + let t = this.registryFactory[e]; + if (t == null) + throw new Error(`Cannot initialize backend ${e}, no registration found.`); + try { + let n = t.factory(); + if (n && !(n instanceof tl) && typeof n.then == "function") { + let s = ++this.pendingBackendInitId, r = n.then((a) => s < this.pendingBackendInitId ? false : (this.registry[e] = a, this.pendingBackendInit = null, true)).catch((a) => (s < this.pendingBackendInitId || (this.pendingBackendInit = null, ur(`Initialization of backend ${e} failed`), ur(a.stack || a.message)), false)); + return this.pendingBackendInit = r, { success: r, asyncInit: true }; + } else + return this.registry[e] = n, { success: true, asyncInit: false }; + } catch (n) { + return ur(`Initialization of backend ${e} failed`), ur(n.stack || n.message), { success: false, asyncInit: false }; + } + } + removeBackend(e) { + if (!(e in this.registryFactory)) + throw new Error(`${e} backend not found in registry`); + this.backendName === e && this.pendingBackendInit != null && this.pendingBackendInitId++, e in this.registry && (this.disposeRegisteredKernels(e), this.registry[e].dispose(), delete this.registry[e]), delete this.registryFactory[e], this.backendName === e && (this.pendingBackendInit = null, this.backendName = null, this.backendInstance = null); + } + getSortedBackends() { + if (Object.keys(this.registryFactory).length === 0) + throw new Error("No backend found in registry."); + return Object.keys(this.registryFactory).sort((e, t) => this.registryFactory[t].priority - this.registryFactory[e].priority); + } + initializeBackendsAndReturnBest() { + let e = this.getSortedBackends(); + for (let t = 0; t < e.length; t++) { + let n = e[t], { success: s, asyncInit: r } = this.initializeBackend(n); + if (r || s) + return { name: n, asyncInit: r }; + } + throw new Error("Could not initialize any backends, all backend initializations failed."); + } + moveData(e, t) { + let n = this.state.tensorInfo.get(t), s = n.backend, r = this.readSync(t), a = s.refCount(t); + s.disposeData(t, true), n.backend = e, e.move(t, r, n.shape, n.dtype, a), this.shouldCheckForMemLeaks() && this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++; + } + tidy(e, t) { + let n = null; + if (t == null) { + if (typeof e != "function") + throw new Error("Please provide a function to tidy()"); + t = e; + } else { + if (typeof e != "string" && !(e instanceof String)) + throw new Error("When calling with two arguments, the first argument to tidy() must be a string"); + if (typeof t != "function") + throw new Error("When calling with two arguments, the 2nd argument to tidy() must be a function"); + n = e; + } + let s; + return 
this.scopedRun(() => this.startScope(n), () => this.endScope(s), () => (s = t(), s instanceof Promise && console.error("Cannot return a Promise inside of tidy."), s)); + } + scopedRun(e, t, n) { + e(); + try { + let s = n(); + return t(), s; + } catch (s) { + throw t(), s; + } + } + nextTensorId() { + return Vf.nextTensorId++; + } + nextVariableId() { + return Vf.nextVariableId++; + } + clone(e) { + let t = L.runKernel(La, { x: e }), n = { x: e }, s = (a) => ({ x: () => { + let i = "float32", o = { x: a }, u = { dtype: i }; + return L.runKernel(Ca, o, u); + } }), r = []; + return this.addTapeNode(this.state.activeScope.name, n, [t], s, r, {}), t; + } + runKernel(e, t, n) { + if (this.backendName == null && this.backend, !(Mf(e, this.backendName) != null)) + throw new Error(`Kernel '${e}' not registered for backend '${this.backendName}'`); + return this.runKernelFunc({ kernelName: e, inputs: t, attrs: n }); + } + shouldCheckForMemLeaks() { + return this.ENV.getBool("IS_TEST"); + } + checkKernelForMemLeak(e, t, n) { + let s = this.backend.numDataIds(), r = 0; + n.forEach((o) => { + r += o.dtype === "complex64" ? 3 : 1; + }); + let a = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1], i = s - t - r - a; + if (i > 0) + throw new Error(`Backend '${this.backendName}' has an internal memory leak (${i} data ids) after running '${e}'`); + } + runKernelFunc(e) { + let t, n = [], s = this.isTapeOn(), r = this.state.numBytes, a = this.state.numTensors; + this.shouldCheckForMemLeaks() && this.state.numDataMovesStack.push(0); + let i; + this.backendName == null && this.backend; + let o, u = wf(e) ? e.kernelName : this.state.activeScope != null ? this.state.activeScope.name : ""; + if (wf(e)) { + let { kernelName: h, inputs: f, attrs: m } = e; + this.backendName == null && this.backend; + let g = Mf(h, this.backendName); + O(g != null, () => `Cannot find registered kernel '${h}' for backend '${this.backendName}'`), i = () => { + let b = this.backend.numDataIds(); + o = g.kernelFunc({ inputs: f, attrs: m, backend: this.backend }); + let y = Array.isArray(o) ? o : [o]; + this.shouldCheckForMemLeaks() && this.checkKernelForMemLeak(h, b, y); + let v = y.map((x) => { + if (x.rank != null) + return x; + let { dataId: k, shape: T, dtype: C } = x; + return this.makeTensorFromDataId(k, T, C); + }); + if (s) { + let x = this.getTensorsForGradient(h, f, v); + n = this.saveTensorsForBackwardMode(x); + } + return v; + }; + } else { + let { forwardFunc: h } = e, f = (m) => { + !s || (n = m.map((g) => this.keep(this.clone(g)))); + }; + i = () => { + let m = this.backend.numDataIds(); + o = this.tidy(() => h(this.backend, f)); + let g = Array.isArray(o) ? o : [o]; + return this.shouldCheckForMemLeaks() && this.checkKernelForMemLeak(u, m, g), g; + }; + } + let { inputs: c, attrs: l } = e, d = wf(e) ? null : e.backwardsFunc, p; + return this.scopedRun(() => this.state.kernelDepth++, () => this.state.kernelDepth--, () => { + !this.ENV.getBool("DEBUG") && !this.state.profiling ? t = i() : (p = this.profiler.profileKernel(u, c, () => i()), this.ENV.getBool("DEBUG") && this.profiler.logKernelProfile(p), t = p.outputs); + }), s && this.addTapeNode(u, c, t, d, n, l), this.state.profiling && this.state.activeProfile.kernels.push({ name: u, bytesAdded: this.state.numBytes - r, totalBytesSnapshot: this.state.numBytes, tensorsAdded: this.state.numTensors - a, totalTensorsSnapshot: this.state.numTensors, inputShapes: Object.keys(c).map((h) => c[h] != null ? 
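+      // Inputs that are null are recorded with a null shape so the profile
+      // entry stays positionally aligned with the kernel's declared inputs.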
c[h].shape : null), outputShapes: t.map((h) => h.shape), kernelTimeMs: p.timeMs, extraInfo: p.extraInfo }), Array.isArray(o) ? t : t[0]; + } + saveTensorsForBackwardMode(e) { + return e.map((n) => this.keep(this.clone(n))); + } + getTensorsForGradient(e, t, n) { + let s = Rv(e); + if (s != null) { + let r = s.inputsToSave || [], a = s.outputsToSave || [], i; + s.saveAllInputs ? (O(Array.isArray(t), () => "saveAllInputs is true, expected inputs to be an array."), i = Object.keys(t).map((u) => t[u])) : i = r.map((u) => t[u]); + let o = n.filter((u, c) => a[c]); + return i.concat(o); + } + return []; + } + makeTensor(e, t, n, s) { + if (e == null) + throw new Error("Values passed to engine.makeTensor() are null"); + n = n || "float32", s = s || this.backend; + let r = e; + n === "string" && lr(e[0]) && (r = e.map((o) => _l(o))); + let a = s.write(r, t, n), i = new tt(t, n, a, this.nextTensorId()); + if (this.trackTensor(i, s), n === "string") { + let o = this.state.tensorInfo.get(a), u = ww(r); + this.state.numBytes += u - o.bytes, o.bytes = u; + } + return i; + } + makeTensorFromDataId(e, t, n, s) { + n = n || "float32"; + let r = new tt(t, n, e, this.nextTensorId()); + return this.trackTensor(r, s), r; + } + makeVariable(e, t = true, n, s) { + n = n || this.nextVariableId().toString(), s != null && s !== e.dtype && (e = e.cast(s)); + let r = new ud(e, t, n, this.nextTensorId()); + if (this.state.registeredVariables[r.name] != null) + throw new Error(`Variable with name ${r.name} was already registered`); + return this.state.registeredVariables[r.name] = r, this.incRef(r, this.backend), r; + } + trackTensor(e, t) { + this.state.numTensors++, e.dtype === "string" && this.state.numStringTensors++; + let n = 0; + e.dtype !== "complex64" && e.dtype !== "string" && (n = e.size * Of(e.dtype)), this.state.numBytes += n, this.state.tensorInfo.has(e.dataId) || (this.state.numDataBuffers++, this.state.tensorInfo.set(e.dataId, { backend: t || this.backend, dtype: e.dtype, shape: e.shape, bytes: n })), e instanceof ud || this.track(e); + } + incRef(e, t) { + this.trackTensor(e, t), this.backend.incRef(e.dataId); + } + removeDataId(e, t) { + this.state.tensorInfo.has(e) && this.state.tensorInfo.get(e).backend === t && (this.state.tensorInfo.delete(e), this.state.numDataBuffers--); + } + disposeTensor(e) { + if (!this.state.tensorInfo.has(e.dataId)) + return; + let t = this.state.tensorInfo.get(e.dataId); + if (this.state.numTensors--, e.dtype === "string" && (this.state.numStringTensors--, this.state.numBytes -= t.bytes), e.dtype !== "complex64" && e.dtype !== "string") { + let n = e.size * Of(e.dtype); + this.state.numBytes -= n; + } + t.backend.disposeData(e.dataId) && this.removeDataId(e.dataId, t.backend); + } + disposeVariables() { + for (let e in this.state.registeredVariables) { + let t = this.state.registeredVariables[e]; + this.disposeVariable(t); + } + } + disposeVariable(e) { + this.disposeTensor(e), this.state.registeredVariables[e.name] != null && delete this.state.registeredVariables[e.name]; + } + memory() { + let e = this.backend.memory(); + return e.numTensors = this.state.numTensors, e.numDataBuffers = this.state.numDataBuffers, e.numBytes = this.state.numBytes, this.state.numStringTensors > 0 && (e.unreliable = true, e.reasons == null && (e.reasons = []), e.reasons.push("Memory usage by string tensors is approximate (2 bytes per character)")), e; + } + async profile(e) { + this.state.profiling = true; + let t = this.state.numBytes, n = this.state.numTensors; + 
this.state.activeProfile.kernels = [], this.state.activeProfile.result = await e(), this.state.profiling = false, this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map((s) => s.totalBytesSnapshot)), this.state.activeProfile.newBytes = this.state.numBytes - t, this.state.activeProfile.newTensors = this.state.numTensors - n; + for (let s of this.state.activeProfile.kernels) + s.kernelTimeMs = await s.kernelTimeMs, s.extraInfo = await s.extraInfo; + return this.state.activeProfile; + } + isTapeOn() { + return this.state.gradientDepth > 0 && this.state.kernelDepth === 0; + } + addTapeNode(e, t, n, s, r, a) { + let i = { id: this.state.nextTapeNodeId++, kernelName: e, inputs: t, outputs: n, saved: r }, o = Rv(e); + o != null && (s = o.gradFunc), s != null && (i.gradient = (u) => (u = u.map((c, l) => { + if (c == null) { + let d = n[l], p = zd(d.size, d.dtype); + return this.makeTensor(p, d.shape, d.dtype); + } + return c; + }), s(u.length > 1 ? u : u[0], r, a))), this.state.activeTape.push(i); + } + keep(e) { + return e.kept = true, e; + } + startTape() { + this.state.gradientDepth === 0 && (this.state.activeTape = []), this.state.gradientDepth++; + } + endTape() { + this.state.gradientDepth--; + } + startScope(e) { + let t = { track: [], name: "unnamed scope", id: this.state.nextScopeId++ }; + e && (t.name = e), this.state.scopeStack.push(t), this.state.activeScope = t; + } + endScope(e) { + let t = bg(e), n = new Set(t.map((r) => r.id)); + for (let r = 0; r < this.state.activeScope.track.length; r++) { + let a = this.state.activeScope.track[r]; + !a.kept && !n.has(a.id) && a.dispose(); + } + let s = this.state.scopeStack.pop(); + this.state.activeScope = this.state.scopeStack.length === 0 ? null : this.state.scopeStack[this.state.scopeStack.length - 1], t.forEach((r) => { + !r.kept && r.scopeId === s.id && this.track(r); + }); + } + gradients(e, t, n, s = false) { + if (O(t.length > 0, () => "gradients() received an empty list of xs."), n != null && n.dtype !== "float32") + throw new Error(`dy must have 'float32' dtype, but has '${n.dtype}'`); + let r = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy("forward", e)); + O(r instanceof tt, () => "The result y returned by f() must be a tensor."); + let a = g$(this.state.activeTape, t, r); + if (!s && a.length === 0 && t.length > 0) + throw new Error("Cannot compute gradient of y=f(x) with respect to x. Make sure that the f you passed encloses all operations that lead from x to y."); + return this.tidy("backward", () => { + let i = {}; + i[r.id] = n == null ? A$(r.shape) : n, b$(i, a, (u) => this.tidy(u), E$); + let o = t.map((u) => i[u.id]); + return this.state.gradientDepth === 0 && (this.state.activeTape.forEach((u) => { + for (let c of u.saved) + c.dispose(); + }), this.state.activeTape = null), { value: r, grads: o }; + }); + } + customGrad(e) { + return O(gr(e), () => "The f passed in customGrad(f) must be a function."), (...t) => { + O(t.every((i) => i instanceof tt), () => "The args passed in customGrad(f)(x1, x2,...) must all be tensors"); + let n, s = {}; + t.forEach((i, o) => { + s[o] = i; + }); + let r = (i, o) => (n = e(...t, o), O(n.value instanceof tt, () => "The function f passed in customGrad(f) must return an object where `obj.value` is a tensor"), O(gr(n.gradFunc), () => "The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function."), n.value), a = (i, o) => { + let u = n.gradFunc(i, o), c = Array.isArray(u) ? 
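+      // gradFunc may return a single tensor or a list; normalize to an
+      // array before the arity and dtype checks that follow.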
u : [u]; + O(c.length === t.length, () => "The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function that returns the same number of tensors as inputs passed to f(...)."), O(c.every((d) => d instanceof tt), () => "The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function that returns a list of only tensors."); + let l = {}; + return c.forEach((d, p) => { + l[p] = () => d; + }), l; + }; + return this.runKernelFunc({ forwardFunc: r, backwardsFunc: a, inputs: s }); + }; + } + readSync(e) { + return this.state.tensorInfo.get(e).backend.readSync(e); + } + read(e) { + return this.state.tensorInfo.get(e).backend.read(e); + } + readToGPU(e, t) { + return this.state.tensorInfo.get(e).backend.readToGPU(e, t); + } + async time(e) { + let t = Wu(), n = await this.backend.time(e); + return n.wallMs = Wu() - t, n; + } + track(e) { + return this.state.activeScope != null && (e.scopeId = this.state.activeScope.id, this.state.activeScope.track.push(e)), e; + } + get registeredVariables() { + return this.state.registeredVariables; + } + reset() { + this.pendingBackendInitId++, this.state.dispose(), this.ENV.reset(), this.state = new Pv(); + for (let e in this.registry) + this.disposeRegisteredKernels(e), this.registry[e].dispose(), delete this.registry[e]; + this.backendName = null, this.backendInstance = null, this.pendingBackendInit = null; + } +}; +var yg = Vf; +yg.nextTensorId = 0; +yg.nextVariableId = 0; +function A$(e) { + let t = Vm(pt(e), "float32"); + return L.makeTensor(t, e, "float32"); +} +function Pw() { + let e = Nw(); + if (e._tfengine == null) { + let t = new XT(e); + e._tfengine = new yg(t); + } + return JT(e._tfengine.ENV), w$(() => e._tfengine), e._tfengine; +} +var L = Pw(); +function E$(e, t) { + let n = { a: e, b: t }; + return L.runKernel(Cr, n); +} +var Rl = {}; +Ae(Rl, { isBrowser: () => zw, isMobile: () => D$, mockIsMobile: () => R$ }); +function _$() { + return typeof navigator != "undefined" && navigator != null; +} +var Wf; +function R$(e) { + Wf = e; +} +function D$(e) { + if (Wf !== void 0) + return Wf; + if (e || _$()) { + if (e || (e = navigator), e.product === "ReactNative") + return true; + let t = e.userAgent || e.vendor || (typeof window != "undefined" ? 
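+    // Legacy fallback: old Opera exposed its UA string on window.opera.
+    // When no UA string is found at all, userAgentData.mobile (UA Client
+    // Hints) is consulted below before the regex test runs.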
window.opera : ""); + if (!t) { + let n = e; + return n.userAgentData && n.userAgentData.mobile; + } + return /(android|bb\d+|meego).+mobile|avantgo|bada\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(t) || /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\-(n|u)|c55\/|capi|ccwa|cdm\-|cell|chtm|cldc|cmd\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\-s|devi|dica|dmob|do(c|p)o|ds(12|\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\-|_)|g1 u|g560|gene|gf\-5|g\-mo|go(\.w|od)|gr(ad|un)|haie|hcit|hd\-(m|p|t)|hei\-|hi(pt|ta)|hp( i|ip)|hs\-c|ht(c(\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\-(20|go|ma)|i230|iac( |\-|\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\/)|klon|kpt |kwc\-|kyo(c|k)|le(no|xi)|lg( g|\/(k|l|u)|50|54|\-[a-w])|libw|lynx|m1\-w|m3ga|m50\/|ma(te|ui|xo)|mc(01|21|ca)|m\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\-2|po(ck|rt|se)|prox|psio|pt\-g|qa\-a|qc(07|12|21|32|60|\-[2-7]|i\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\-|oo|p\-)|sdk\/|se(c(\-|0|1)|47|mc|nd|ri)|sgh\-|shar|sie(\-|m)|sk\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\-|v\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\-|tdg\-|tel(i|m)|tim\-|t\-mo|to(pl|sh)|ts(70|m\-|m3|m5)|tx\-9|up(\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\-|your|zeto|zte\-/i.test(t.substr(0, 4)); + } + return false; +} +function zw() { + return typeof window != "undefined" && window.document != null || typeof WorkerGlobalScope != "undefined"; +} +var fs = X(); +fs.registerFlag("DEBUG", () => false, (e) => { + e && console.warn("Debugging mode is ON. The output of every math call will be downloaded to CPU and checked for NaNs. This significantly impacts performance."); +}); +fs.registerFlag("IS_BROWSER", () => zw()); +fs.registerFlag("IS_NODE", () => typeof process != "undefined" && typeof process.versions != "undefined" && typeof process.versions.node != "undefined"); +fs.registerFlag("IS_CHROME", () => typeof navigator != "undefined" && navigator != null && navigator.userAgent != null && /Chrome/.test(navigator.userAgent) && /Google Inc/.test(navigator.vendor)); +fs.registerFlag("PROD", () => false); +fs.registerFlag("TENSORLIKE_CHECK_SHAPE_CONSISTENCY", () => fs.getBool("DEBUG")); +fs.registerFlag("DEPRECATION_WARNINGS_ENABLED", () => true); +fs.registerFlag("IS_TEST", () => false); +fs.registerFlag("CHECK_COMPUTATION_FOR_ERRORS", () => true); +fs.registerFlag("WRAP_TO_IMAGEBITMAP", () => false); +function Es(e, t) { + let n = e; + if (Zt(e)) + return t === "string" ? 
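+      // TypedArrays are always flat, so the inferred shape is just
+      // [length]; a string dtype cannot be TypedArray-backed, hence [].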
[] : [e.length]; + if (!Array.isArray(e)) + return []; + let s = []; + for (; Array.isArray(n) || Zt(n) && t !== "string"; ) + s.push(n.length), n = n[0]; + return Array.isArray(e) && X().getBool("TENSORLIKE_CHECK_SHAPE_CONSISTENCY") && Mw(e, s, []), s; +} +function Mw(e, t, n) { + if (n = n || [], !Array.isArray(e) && !Zt(e)) { + O(t.length === 0, () => `Element arr[${n.join("][")}] is a primitive, but should be an array/TypedArray of ${t[0]} elements`); + return; + } + O(t.length > 0, () => `Element arr[${n.join("][")}] should be a primitive, but is an array of ${e.length} elements`), O(e.length === t[0], () => `Element arr[${n.join("][")}] should have ${t[0]} elements, but has ${e.length} elements`); + let s = t.slice(1); + for (let r = 0; r < e.length; ++r) + Mw(e[r], s, n.concat(r)); +} +function zv(e, t, n, s) { + if (e !== "string_or_numeric") { + if (e == null) + throw new Error("Expected dtype cannot be null."); + if (e !== "numeric" && e !== t || e === "numeric" && t === "string") + throw new Error(`Argument '${n}' passed to '${s}' must be ${e} tensor, but got ${t} tensor`); + } +} +function E(e, t, n, s = "numeric") { + if (e instanceof tt) + return zv(s, e.dtype, t, n), e; + let r = Pd(e); + if (r !== "string" && ["bool", "int32", "float32"].indexOf(s) >= 0 && (r = s), zv(s, r, t, n), e == null || !Zt(e) && !Array.isArray(e) && typeof e != "number" && typeof e != "boolean" && typeof e != "string") { + let u = e == null ? "null" : e.constructor.name; + throw new Error(`Argument '${t}' passed to '${n}' must be a Tensor or TensorLike, but got '${u}'`); + } + let a = Es(e, r); + !Zt(e) && !Array.isArray(e) && (e = [e]); + let o = r !== "string" ? tp(e, r) : na(e, [], true); + return L.makeTensor(o, a, r); +} +function Uu(e, t, n, s = "numeric") { + if (!Array.isArray(e)) + throw new Error(`Argument ${t} passed to ${n} must be a \`Tensor[]\` or \`TensorLike[]\``); + return e.map((a, i) => E(a, `${t}[${i}]`, n, s)); +} +var F$ = "__op"; +function B(e) { + let t = Object.keys(e); + if (t.length !== 1) + throw new Error(`Please provide an object with a single key (operation name) mapping to a function. Got an object with ${t.length} keys.`); + let n = t[0], s = e[n]; + n.endsWith("_") && (n = n.substring(0, n.length - 1)), n = n + F$; + let r = (...a) => { + L.startScope(n); + try { + let i = s(...a); + return Um(i) && console.error("Cannot return a Promise inside of tidy."), L.endScope(i), i; + } catch (i) { + throw L.endScope(null), i; + } + }; + return Object.defineProperty(r, "name", { value: n, configurable: true }), r; +} +function O$(e, t) { + let n = E(e, "real", "complex"), s = E(t, "imag", "complex"); + on(n.shape, s.shape, `real and imag shapes, ${n.shape} and ${s.shape}, must match in call to tf.complex().`); + let r = { real: n, imag: s }; + return L.runKernel(Ld, r); +} +var ia = B({ complex_: O$ }); +function $r(e, t, n, s) { + if (s == null && (s = Pd(e)), s === "complex64") + throw new Error("Cannot construct a complex64 tensor directly. Please use tf.complex(real, imag)."); + if (!Zt(e) && !Array.isArray(e) && typeof e != "number" && typeof e != "boolean" && typeof e != "string") + throw new Error("values passed to tensor(values) must be a number/boolean/string or an array of numbers/booleans/strings, or a TypedArray"); + if (t != null) { + Wm(t); + let r = pt(t), a = pt(n); + O(r === a, () => `Based on the provided shape, [${t}], the tensor should have ${r} values but has ${a}`); + for (let i = 0; i < n.length; ++i) { + let o = n[i], u = i === n.length - 1 ? 
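+      // Only the last inferred axis is compared against the remaining
+      // element count, which appears to let a flat values array satisfy a
+      // multi-dimensional provided shape; earlier axes must match exactly.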
o !== pt(t.slice(i)) : true; + O(n[i] === t[i] || !u, () => `Error creating a new Tensor. Inferred shape (${n}) does not match the provided shape (${t}). `); + } + } + return !Zt(e) && !Array.isArray(e) && (e = [e]), t = t || n, e = s !== "string" ? tp(e, s) : na(e, [], true), L.makeTensor(e, t, s); +} +function ds(e, t, n) { + let s = Es(e, n); + return $r(e, t, s, n); +} +var Uf = { float32: 4, float16: 2, int32: 4, uint16: 2, uint8: 1, bool: 1, complex64: 8 }; +var ld = 4; +async function P$(e, t) { + let n = [], s = [], r = Array.isArray(e) ? e.map((i) => i.name) : Object.keys(e); + for (let i = 0; i < r.length; ++i) { + let o = r[i], u = Array.isArray(e) ? e[i].tensor : e[o]; + if (u.dtype !== "float32" && u.dtype !== "int32" && u.dtype !== "bool" && u.dtype !== "string" && u.dtype !== "complex64") + throw new Error(`Unsupported dtype in weight '${o}': ${u.dtype}`); + let c = { name: o, shape: u.shape, dtype: u.dtype }; + if (u.dtype === "string") { + let l = new Promise(async (d) => { + let p = await u.bytes(), h = p.reduce((g, b) => g + b.length, 0) + ld * p.length, f = new Uint8Array(h), m = 0; + for (let g = 0; g < p.length; g++) { + let b = p[g], y = new Uint8Array(new Uint32Array([b.length]).buffer); + f.set(y, m), m += ld, f.set(b, m), m += b.length; + } + d(f); + }); + s.push(l); + } else + s.push(u.data()); + t != null && (c.group = t), n.push(c); + } + let a = await Promise.all(s); + return { data: z$(a), specs: n }; +} +function Lw(e, t) { + let n = {}, s, r = 0; + for (let a of t) { + let i = a.name, o = a.dtype, u = a.shape, c = pt(u), l; + if ("quantization" in a) { + let d = a.quantization; + if (d.dtype === "uint8" || d.dtype === "uint16") { + if (!("min" in d && "scale" in d)) + throw new Error(`Weight ${a.name} with quantization ${d.dtype} doesn't have corresponding metadata min and scale.`); + } else if (d.dtype === "float16") { + if (o !== "float32") + throw new Error(`Weight ${a.name} is quantized with ${d.dtype} which only supports weights of type float32 not ${o}.`); + } else + throw new Error(`Weight ${a.name} has unknown quantization dtype ${d.dtype}. Supported quantization dtypes are: 'uint8', 'uint16', and 'float16'.`); + let p = Uf[d.dtype], h = e.slice(r, r + c * p), f = d.dtype === "uint8" ? 
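+      // Dequantization: view the raw buffer as uint8/uint16, then expand
+      // below: affine (value * scale + min) for uint8/uint16 weights, or
+      // the float16 lookup-table decoder U$ for float16.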
new Uint8Array(h) : new Uint16Array(h); + if (o === "float32") + if (d.dtype === "uint8" || d.dtype === "uint16") { + l = new Float32Array(f.length); + for (let m = 0; m < f.length; m++) { + let g = f[m]; + l[m] = g * d.scale + d.min; + } + } else if (d.dtype === "float16") + s === void 0 && (s = U$()), l = s(f); + else + throw new Error(`Unsupported quantization type ${d.dtype} for weight type float32.`); + else if (o === "int32") { + if (d.dtype !== "uint8" && d.dtype !== "uint16") + throw new Error(`Unsupported quantization type ${d.dtype} for weight type int32.`); + l = new Int32Array(f.length); + for (let m = 0; m < f.length; m++) { + let g = f[m]; + l[m] = Math.round(g * d.scale + d.min); + } + } else + throw new Error(`Unsupported dtype in weight '${i}': ${o}`); + r += c * p; + } else if (o === "string") { + let d = pt(a.shape); + l = []; + for (let p = 0; p < d; p++) { + let h = new Uint32Array(e.slice(r, r + ld))[0]; + r += ld; + let f = new Uint8Array(e.slice(r, r + h)); + l.push(f), r += h; + } + } else { + let d = Uf[o], p = e.slice(r, r + c * d); + if (o === "float32") + l = new Float32Array(p); + else if (o === "int32") + l = new Int32Array(p); + else if (o === "bool") + l = new Uint8Array(p); + else if (o === "complex64") { + l = new Float32Array(p); + let h = new Float32Array(l.length / 2), f = new Float32Array(l.length / 2); + for (let b = 0; b < h.length; b++) + h[b] = l[b * 2], f[b] = l[b * 2 + 1]; + let m = ds(h, u, "float32"), g = ds(f, u, "float32"); + n[i] = ia(m, g), m.dispose(), g.dispose(); + } else + throw new Error(`Unsupported dtype in weight '${i}': ${o}`); + r += c * d; + } + o !== "complex64" && (n[i] = ds(l, u, o)); + } + return n; +} +function z$(e) { + if (e === null) + throw new Error(`Invalid input value: ${JSON.stringify(e)}`); + let t = 0, n = []; + e.forEach((a) => { + if (t += a.byteLength, n.push(a.byteLength === a.buffer.byteLength ? a : new a.constructor(a)), !(a instanceof Float32Array || a instanceof Int32Array || a instanceof Uint8Array)) + throw new Error(`Unsupported TypedArray subtype: ${a.constructor.name}`); + }); + let s = new Uint8Array(t), r = 0; + return n.forEach((a) => { + s.set(new Uint8Array(a.buffer), r), r += a.byteLength; + }), s.buffer; +} +var vg = typeof Buffer != "undefined" && (typeof Blob == "undefined" || typeof atob == "undefined" || typeof btoa == "undefined"); +function Mv(e) { + return vg ? 
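+  // vg marks a Node-like environment (Buffer present, Blob/atob/btoa
+  // absent), so string byte counts use Buffer there and a Blob in browsers.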
Buffer.byteLength(e) : new Blob([e]).size; +} +function M$(e) { + if (vg) + return Buffer.from(e).toString("base64"); + let t = new Uint8Array(e), n = ""; + for (let s = 0, r = t.length; s < r; s++) + n += String.fromCharCode(t[s]); + return btoa(n); +} +function L$(e) { + if (vg) { + let s = Buffer.from(e, "base64"); + return s.buffer.slice(s.byteOffset, s.byteOffset + s.byteLength); + } + let t = atob(e), n = new Uint8Array(t.length); + for (let s = 0; s < t.length; ++s) + n.set([t.charCodeAt(s)], s); + return n.buffer; +} +function xg(e) { + if (e.length === 1) + return e[0]; + let t = 0; + e.forEach((r) => { + t += r.byteLength; + }); + let n = new Uint8Array(t), s = 0; + return e.forEach((r) => { + n.set(new Uint8Array(r), s), s += r.byteLength; + }), n.buffer; +} +function Lv(e) { + let t = "/"; + for (e = e.trim(); e.endsWith(t); ) + e = e.slice(0, e.length - 1); + let n = e.split(t); + return n[n.length - 1]; +} +function Bw(e, t) { + let n = { modelTopology: e.modelTopology, format: e.format, generatedBy: e.generatedBy, convertedBy: e.convertedBy, weightsManifest: t }; + return e.signature != null && (n.signature = e.signature), e.userDefinedMetadata != null && (n.userDefinedMetadata = e.userDefinedMetadata), e.modelInitializer != null && (n.modelInitializer = e.modelInitializer), e.trainingConfig != null && (n.trainingConfig = e.trainingConfig), n; +} +async function wg(e, t) { + let n = { modelTopology: e.modelTopology, format: e.format, generatedBy: e.generatedBy, convertedBy: e.convertedBy }; + if (e.trainingConfig != null && (n.trainingConfig = e.trainingConfig), e.weightsManifest != null) { + let [s, r] = await t(e.weightsManifest); + n.weightSpecs = s, n.weightData = r; + } + return e.signature != null && (n.signature = e.signature), e.userDefinedMetadata != null && (n.userDefinedMetadata = e.userDefinedMetadata), e.modelInitializer != null && (n.modelInitializer = e.modelInitializer), n; +} +function Dl(e) { + if (e.modelTopology instanceof ArrayBuffer) + throw new Error("Expected JSON model topology, received ArrayBuffer."); + return { dateSaved: new Date(), modelTopologyType: "JSON", modelTopologyBytes: e.modelTopology == null ? 0 : Mv(JSON.stringify(e.modelTopology)), weightSpecsBytes: e.weightSpecs == null ? 0 : Mv(JSON.stringify(e.weightSpecs)), weightDataBytes: e.weightData == null ? 
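+  // Topology and weight-spec sizes are measured from their JSON string
+  // encoding via Mv; weight data reports the raw ArrayBuffer byteLength.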
0 : e.weightData.byteLength }; +} +function B$() { + let e = (n) => { + let s = n << 13, r = 0; + for (; (s & 8388608) === 0; ) + r -= 8388608, s <<= 1; + return s &= -8388609, r += 947912704, s | r; + }, t = new Uint32Array(2048); + t[0] = 0; + for (let n = 1; n < 1024; n++) + t[n] = e(n); + for (let n = 1024; n < 2048; n++) + t[n] = 939524096 + (n - 1024 << 13); + return t; +} +function V$() { + let e = new Uint32Array(64); + e[0] = 0, e[31] = 1199570944, e[32] = 2147483648, e[63] = 3347054592; + for (let t = 1; t < 31; t++) + e[t] = t << 23; + for (let t = 33; t < 63; t++) + e[t] = 2147483648 + (t - 32 << 23); + return e; +} +function W$() { + let e = new Uint32Array(64); + for (let t = 0; t < 64; t++) + e[t] = 1024; + return e[0] = e[32] = 0, e; +} +function U$() { + let e = B$(), t = V$(), n = W$(); + return (s) => { + let r = new ArrayBuffer(4 * s.length), a = new Uint32Array(r); + for (let i = 0; i < s.length; i++) { + let o = s[i], u = e[n[o >> 10] + (o & 1023)] + t[o >> 10]; + a[i] = u; + } + return new Float32Array(r); + }; +} +var wt = class { + constructor() { + this.saveRouters = [], this.loadRouters = []; + } + static getInstance() { + return wt.instance == null && (wt.instance = new wt()), wt.instance; + } + static registerSaveRouter(e) { + wt.getInstance().saveRouters.push(e); + } + static registerLoadRouter(e) { + wt.getInstance().loadRouters.push(e); + } + static getSaveHandlers(e) { + return wt.getHandlers(e, "save"); + } + static getLoadHandlers(e, t) { + return wt.getHandlers(e, "load", t); + } + static getHandlers(e, t, n) { + let s = []; + return (t === "load" ? wt.getInstance().loadRouters : wt.getInstance().saveRouters).forEach((a) => { + let i = a(e, n); + i !== null && s.push(i); + }), s; + } +}; +var G$ = (e) => wt.registerSaveRouter(e); +var H$ = (e) => wt.registerLoadRouter(e); +var q$ = (e) => wt.getSaveHandlers(e); +var j$ = (e, t) => wt.getLoadHandlers(e, t); +var Gf = "tensorflowjs"; +var Hf = 1; +var Xr = "models_store"; +var cr = "model_info_store"; +function Vw() { + if (!X().getBool("IS_BROWSER")) + throw new Error("Failed to obtain IndexedDB factory because the current environmentis not a web browser."); + let e = typeof window == "undefined" ? 
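+  // window is undefined inside web workers, so self is used there; the
+  // vendor-prefixed IndexedDB implementations are probed as fallbacks.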
self : window, t = e.indexedDB || e.mozIndexedDB || e.webkitIndexedDB || e.msIndexedDB || e.shimIndexedDB; + if (t == null) + throw new Error("The current browser does not appear to support IndexedDB."); + return t; +} +function qf(e) { + let t = e.result; + t.createObjectStore(Xr, { keyPath: "modelPath" }), t.createObjectStore(cr, { keyPath: "modelPath" }); +} +var oa = class { + constructor(e) { + if (this.indexedDB = Vw(), e == null || !e) + throw new Error("For IndexedDB, modelPath must not be null, undefined or empty."); + this.modelPath = e; + } + async save(e) { + if (e.modelTopology instanceof ArrayBuffer) + throw new Error("BrowserLocalStorage.save() does not support saving model topology in binary formats yet."); + return this.databaseAction(this.modelPath, e); + } + async load() { + return this.databaseAction(this.modelPath); + } + databaseAction(e, t) { + return new Promise((n, s) => { + let r = this.indexedDB.open(Gf, Hf); + r.onupgradeneeded = () => qf(r), r.onsuccess = () => { + let a = r.result; + if (t == null) { + let i = a.transaction(Xr, "readonly"), u = i.objectStore(Xr).get(this.modelPath); + u.onsuccess = () => { + if (u.result == null) + return a.close(), s(new Error(`Cannot find model with path '${this.modelPath}' in IndexedDB.`)); + n(u.result.modelArtifacts); + }, u.onerror = (c) => (a.close(), s(u.error)), i.oncomplete = () => a.close(); + } else { + let i = Dl(t), o = a.transaction(cr, "readwrite"), u = o.objectStore(cr), c = u.put({ modelPath: this.modelPath, modelArtifactsInfo: i }), l; + c.onsuccess = () => { + l = a.transaction(Xr, "readwrite"); + let p = l.objectStore(Xr).put({ modelPath: this.modelPath, modelArtifacts: t, modelArtifactsInfo: i }); + p.onsuccess = () => n({ modelArtifactsInfo: i }), p.onerror = (h) => { + u = o.objectStore(cr); + let f = u.delete(this.modelPath); + f.onsuccess = () => (a.close(), s(p.error)), f.onerror = (m) => (a.close(), s(p.error)); + }; + }, c.onerror = (d) => (a.close(), s(c.error)), o.oncomplete = () => { + l == null ? a.close() : l.oncomplete = () => a.close(); + }; + } + }, r.onerror = (a) => s(r.error); + }); + } +}; +oa.URL_SCHEME = "indexeddb://"; +var Ww = (e) => X().getBool("IS_BROWSER") && !Array.isArray(e) && e.startsWith(oa.URL_SCHEME) ? K$(e.slice(oa.URL_SCHEME.length)) : null; +wt.registerSaveRouter(Ww); +wt.registerLoadRouter(Ww); +function K$(e) { + return new oa(e); +} +function X$(e) { + return e.startsWith(oa.URL_SCHEME) ? 
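+  // Strip the "indexeddb://" scheme so the bare model path keys the object
+  // stores; paths without the scheme pass through unchanged.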
e.slice(oa.URL_SCHEME.length) : e; +} +var Y$ = class { + constructor() { + this.indexedDB = Vw(); + } + async listModels() { + return new Promise((e, t) => { + let n = this.indexedDB.open(Gf, Hf); + n.onupgradeneeded = () => qf(n), n.onsuccess = () => { + let s = n.result, r = s.transaction(cr, "readonly"), i = r.objectStore(cr).getAll(); + i.onsuccess = () => { + let o = {}; + for (let u of i.result) + o[u.modelPath] = u.modelArtifactsInfo; + e(o); + }, i.onerror = (o) => (s.close(), t(i.error)), r.oncomplete = () => s.close(); + }, n.onerror = (s) => t(n.error); + }); + } + async removeModel(e) { + return e = X$(e), new Promise((t, n) => { + let s = this.indexedDB.open(Gf, Hf); + s.onupgradeneeded = () => qf(s), s.onsuccess = () => { + let r = s.result, a = r.transaction(cr, "readwrite"), i = a.objectStore(cr), o = i.get(e), u; + o.onsuccess = () => { + if (o.result == null) + return r.close(), n(new Error(`Cannot find model with path '${e}' in IndexedDB.`)); + { + let c = i.delete(e), l = () => { + u = r.transaction(Xr, "readwrite"); + let p = u.objectStore(Xr).delete(e); + p.onsuccess = () => t(o.result.modelArtifactsInfo), p.onerror = (h) => n(o.error); + }; + c.onsuccess = l, c.onerror = (d) => (l(), r.close(), n(o.error)); + } + }, o.onerror = (c) => (r.close(), n(o.error)), a.oncomplete = () => { + u == null ? r.close() : u.oncomplete = () => r.close(); + }; + }, s.onerror = (r) => n(s.error); + }); + } +}; +var Us = "/"; +var Li = "tensorflowjs_models"; +var Uw = "info"; +var Q$ = "model_topology"; +var Z$ = "weight_specs"; +var J$ = "weight_data"; +var eA = "model_metadata"; +function Gw(e) { + return { info: [Li, e, Uw].join(Us), topology: [Li, e, Q$].join(Us), weightSpecs: [Li, e, Z$].join(Us), weightData: [Li, e, J$].join(Us), modelMetadata: [Li, e, eA].join(Us) }; +} +function Hw(e) { + for (let t of Object.values(e)) + window.localStorage.removeItem(t); +} +function tA(e) { + let t = e.split(Us); + if (t.length < 3) + throw new Error(`Invalid key format: ${e}`); + return t.slice(1, t.length - 1).join(Us); +} +function nA(e) { + return e.startsWith(ua.URL_SCHEME) ? e.slice(ua.URL_SCHEME.length) : e; +} +var ua = class { + constructor(e) { + if (!X().getBool("IS_BROWSER") || typeof window == "undefined" || typeof window.localStorage == "undefined") + throw new Error("The current environment does not support local storage."); + if (this.LS = window.localStorage, e == null || !e) + throw new Error("For local storage, modelPath must not be null, undefined or empty."); + this.modelPath = e, this.keys = Gw(this.modelPath); + } + async save(e) { + if (e.modelTopology instanceof ArrayBuffer) + throw new Error("BrowserLocalStorage.save() does not support saving model topology in binary formats yet."); + { + let t = JSON.stringify(e.modelTopology), n = JSON.stringify(e.weightSpecs), s = Dl(e); + try { + this.LS.setItem(this.keys.info, JSON.stringify(s)), this.LS.setItem(this.keys.topology, t), this.LS.setItem(this.keys.weightSpecs, n), this.LS.setItem(this.keys.weightData, M$(e.weightData)); + let r = { format: e.format, generatedBy: e.generatedBy, convertedBy: e.convertedBy, signature: e.signature != null ? e.signature : void 0, userDefinedMetadata: e.userDefinedMetadata != null ? e.userDefinedMetadata : void 0, modelInitializer: e.modelInitializer != null ? e.modelInitializer : void 0, trainingConfig: e.trainingConfig != null ? 
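+        // Optional artifact fields are written as undefined rather than
+        // omitted; JSON.stringify then drops them from the stored record.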
e.trainingConfig : void 0 }; + return this.LS.setItem(this.keys.modelMetadata, JSON.stringify(r)), { modelArtifactsInfo: s }; + } catch (r) { + throw Hw(this.keys), new Error(`Failed to save model '${this.modelPath}' to local storage: size quota being exceeded is a possible cause of this failure: modelTopologyBytes=${s.modelTopologyBytes}, weightSpecsBytes=${s.weightSpecsBytes}, weightDataBytes=${s.weightDataBytes}.`); + } + } + } + async load() { + let e = JSON.parse(this.LS.getItem(this.keys.info)); + if (e == null) + throw new Error(`In local storage, there is no model with name '${this.modelPath}'`); + if (e.modelTopologyType !== "JSON") + throw new Error("BrowserLocalStorage does not support loading non-JSON model topology yet."); + let t = {}, n = JSON.parse(this.LS.getItem(this.keys.topology)); + if (n == null) + throw new Error(`In local storage, the topology of model '${this.modelPath}' is missing.`); + t.modelTopology = n; + let s = JSON.parse(this.LS.getItem(this.keys.weightSpecs)); + if (s == null) + throw new Error(`In local storage, the weight specs of model '${this.modelPath}' are missing.`); + t.weightSpecs = s; + let r = this.LS.getItem(this.keys.modelMetadata); + if (r != null) { + let i = JSON.parse(r); + t.format = i.format, t.generatedBy = i.generatedBy, t.convertedBy = i.convertedBy, i.signature != null && (t.signature = i.signature), i.userDefinedMetadata != null && (t.userDefinedMetadata = i.userDefinedMetadata), i.modelInitializer != null && (t.modelInitializer = i.modelInitializer), i.trainingConfig != null && (t.trainingConfig = i.trainingConfig); + } + let a = this.LS.getItem(this.keys.weightData); + if (a == null) + throw new Error(`In local storage, the binary weight values of model '${this.modelPath}' are missing.`); + return t.weightData = L$(a), t; + } +}; +ua.URL_SCHEME = "localstorage://"; +var qw = (e) => X().getBool("IS_BROWSER") && !Array.isArray(e) && e.startsWith(ua.URL_SCHEME) ? 
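// NOTE: ua.save() above deletes any partially written keys (Hw) and rethrows when
// setItem fails, which in practice usually means the localStorage quota was exceeded,
// so a caller can fall back to a roomier medium. A hedged sketch; the schemes are real,
// the model name is hypothetical:
//
//   try {
//     await model.save('localstorage://face'); // subject to the ~5MB localStorage quota
//   } catch {
//     await model.save('indexeddb://face');    // IndexedDB tolerates much larger models
//   }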
sA(e.slice(ua.URL_SCHEME.length)) : null; +wt.registerSaveRouter(qw); +wt.registerLoadRouter(qw); +function sA(e) { + return new ua(e); +} +var rA = class { + constructor() { + O(X().getBool("IS_BROWSER"), () => "Current environment is not a web browser"), O(typeof window == "undefined" || typeof window.localStorage != "undefined", () => "Current browser does not appear to support localStorage"), this.LS = window.localStorage; + } + async listModels() { + let e = {}, t = Li + Us, n = Us + Uw; + for (let s = 0; s < this.LS.length; ++s) { + let r = this.LS.key(s); + if (r.startsWith(t) && r.endsWith(n)) { + let a = tA(r); + e[a] = JSON.parse(this.LS.getItem(r)); + } + } + return e; + } + async removeModel(e) { + e = nA(e); + let t = Gw(e); + if (this.LS.getItem(t.info) == null) + throw new Error(`Cannot find model at path '${e}'`); + let n = JSON.parse(this.LS.getItem(t.info)); + return Hw(t), n; + } +}; +var Ui = "://"; +var zn = class { + constructor() { + this.managers = {}; + } + static getInstance() { + return zn.instance == null && (zn.instance = new zn()), zn.instance; + } + static registerManager(e, t) { + O(e != null, () => "scheme must not be undefined or null."), e.endsWith(Ui) && (e = e.slice(0, e.indexOf(Ui))), O(e.length > 0, () => "scheme must not be an empty string."); + let n = zn.getInstance(); + O(n.managers[e] == null, () => `A model store manager is already registered for scheme '${e}'.`), n.managers[e] = t; + } + static getManager(e) { + let t = this.getInstance().managers[e]; + if (t == null) + throw new Error(`Cannot find model manager for scheme '${e}'`); + return t; + } + static getSchemes() { + return Object.keys(this.getInstance().managers); + } +}; +function jc(e) { + if (e.indexOf(Ui) === -1) + throw new Error(`The url string provided does not contain a scheme. 
Supported schemes are: ${zn.getSchemes().join(",")}`);
+  return { scheme: e.split(Ui)[0], path: e.split(Ui)[1] };
+}
+async function jw(e, t, n = false) {
+  O(e !== t, () => `Old path and new path are the same: '${e}'`);
+  let s = wt.getLoadHandlers(e);
+  O(s.length > 0, () => `Copying failed because no load handler is found for source URL ${e}.`), O(s.length < 2, () => `Copying failed because more than one (${s.length}) load handlers for source URL ${e}.`);
+  let r = s[0], a = wt.getSaveHandlers(t);
+  O(a.length > 0, () => `Copying failed because no save handler is found for destination URL ${t}.`), O(a.length < 2, () => `Copying failed because more than one (${a.length}) save handlers for destination URL ${t}.`);
+  let i = a[0], o = jc(e).scheme, u = jc(e).path, c = o === jc(t).scheme, l = await r.load();
+  n && c && await zn.getManager(o).removeModel(u);
+  let d = await i.save(l);
+  return n && !c && await zn.getManager(o).removeModel(u), d.modelArtifactsInfo;
+}
+async function aA() {
+  let e = zn.getSchemes(), t = {};
+  for (let n of e) {
+    let s = await zn.getManager(n).listModels();
+    for (let r in s) {
+      let a = n + Ui + r;
+      t[a] = s[r];
+    }
+  }
+  return t;
+}
+async function iA(e) {
+  let t = jc(e);
+  return zn.getManager(t.scheme).removeModel(t.path);
+}
+async function oA(e, t) {
+  return jw(e, t, false);
+}
+async function uA(e, t) {
+  return jw(e, t, true);
+}
+var lA = class {
+  fetch(e, t) {
+    return fetch(e, t);
+  }
+  now() {
+    return performance.now();
+  }
+  encode(e, t) {
+    if (t !== "utf-8" && t !== "utf8")
+      throw new Error(`Browser's encoder only supports utf-8, but got ${t}`);
+    return this.textEncoder == null && (this.textEncoder = new TextEncoder()), this.textEncoder.encode(e);
+  }
+  decode(e, t) {
+    return new TextDecoder(t).decode(e);
+  }
+};
+if (X().get("IS_BROWSER")) {
+  X().setPlatform("browser", new lA());
+  try {
+    zn.registerManager(ua.URL_SCHEME, new rA());
+  } catch (e) {
+  }
+  try {
+    zn.registerManager(oa.URL_SCHEME, new Y$());
+  } catch (e) {
+  }
+}
+var cA = { importFetch: () => bT() };
+var kf;
+var dA = class {
+  constructor() {
+    this.util = yT(), this.textEncoder = new this.util.TextEncoder();
+  }
+  fetch(e, t) {
+    return X().global.fetch != null ? X().global.fetch(e, t) : (kf == null && (kf = cA.importFetch()), kf(e, t));
+  }
+  now() {
+    let e = process.hrtime();
+    return e[0] * 1e3 + e[1] / 1e6;
+  }
+  encode(e, t) {
+    if (t !== "utf-8" && t !== "utf8")
+      throw new Error(`Node built-in encoder only supports utf-8, but got ${t}`);
+    return this.textEncoder.encode(e);
+  }
+  decode(e, t) {
+    return e.length === 0 ? 
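// NOTE: jw above implements both tf.io.copyModel (oA) and tf.io.moveModel (uA) by pairing
// the source URL's load handler with the destination URL's save handler, while aA and iA
// expose listModels/removeModel through the scheme registry (zn). A sketch against the
// public API; the paths are hypothetical:
//
//   await tf.io.copyModel('localstorage://face', 'indexeddb://face');
//   const models = await tf.io.listModels(); // { 'indexeddb://face': {...sizes, date} }
//   await tf.io.removeModel('localstorage://face');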
"" : new this.util.TextDecoder(t).decode(e); + } +}; +X().get("IS_NODE") && !X().get("IS_BROWSER") && X().setPlatform("node", new dA()); +function _e(e, t = "float32", n) { + return t = t || "float32", Wm(e), new Vt(e, t, n); +} +function pA(e, t) { + let n = E(e, "x", "cast"); + if (!xw(t)) + throw new Error(`Failed to cast to unknown dtype ${t}`); + if (t === "string" && n.dtype !== "string" || t !== "string" && n.dtype === "string") + throw new Error("Only strings can be casted to strings"); + let s = { x: n }, r = { dtype: t }; + return L.runKernel(Ca, s, r); +} +var pe = B({ cast_: pA }); +function hA(e) { + let n = { x: E(e, "x", "clone", "string_or_numeric") }; + return L.runKernel(La, n); +} +var pr = B({ clone_: hA }); +function fA(e, t = false) { + console.log(e.toString(t)); +} +Pw(); +var mA = { buffer: _e, cast: pe, clone: pr, print: fA }; +k$(mA); +var An = {}; +Ae(An, { browserFiles: () => kA, browserHTTPRequest: () => TA, concatenateArrayBuffers: () => xg, copyModel: () => oA, decodeWeights: () => Lw, encodeWeights: () => P$, fromMemory: () => AA, getLoadHandlers: () => j$, getModelArtifactsForJSON: () => wg, getModelArtifactsInfoForJSON: () => Dl, getSaveHandlers: () => q$, http: () => Ig, isHTTPScheme: () => Kf, listModels: () => aA, loadWeights: () => IA, moveModel: () => uA, registerLoadRouter: () => H$, registerSaveRouter: () => G$, removeModel: () => iA, weightsLoaderFactory: () => Xw, withSaveHandler: () => EA }); +var gA = "model"; +var bA = ".json"; +var yA = ".weights.bin"; +function Bv(e) { + return new Promise((t) => setTimeout(t)).then(e); +} +var jf = class { + constructor(e) { + if (!X().getBool("IS_BROWSER")) + throw new Error("browserDownloads() cannot proceed because the current environment is not a browser."); + e.startsWith(jf.URL_SCHEME) && (e = e.slice(jf.URL_SCHEME.length)), (e == null || e.length === 0) && (e = gA), this.modelJsonFileName = e + bA, this.weightDataFileName = e + yA; + } + async save(e) { + if (typeof document == "undefined") + throw new Error("Browser downloads are not supported in this environment since `document` is not present"); + let t = window.URL.createObjectURL(new Blob([e.weightData], { type: "application/octet-stream" })); + if (e.modelTopology instanceof ArrayBuffer) + throw new Error("BrowserDownloads.save() does not support saving model topology in binary formats yet."); + { + let n = [{ paths: ["./" + this.weightDataFileName], weights: e.weightSpecs }], s = Bw(e, n), r = window.URL.createObjectURL(new Blob([JSON.stringify(s)], { type: "application/json" })), a = this.modelJsonAnchor == null ? document.createElement("a") : this.modelJsonAnchor; + if (a.download = this.modelJsonFileName, a.href = r, await Bv(() => a.dispatchEvent(new MouseEvent("click"))), e.weightData != null) { + let i = this.weightDataAnchor == null ? 
document.createElement("a") : this.weightDataAnchor; + i.download = this.weightDataFileName, i.href = t, await Bv(() => i.dispatchEvent(new MouseEvent("click"))); + } + return { modelArtifactsInfo: Dl(e) }; + } + } +}; +var cd = jf; +cd.URL_SCHEME = "downloads://"; +var vA = class { + constructor(e) { + if (e == null || e.length < 1) + throw new Error(`When calling browserFiles, at least 1 file is required, but received ${e}`); + this.jsonFile = e[0], this.weightsFiles = e.slice(1); + } + async load() { + return new Promise((e, t) => { + let n = new FileReader(); + n.onload = (s) => { + let r = JSON.parse(s.target.result), a = r.modelTopology; + if (a == null) { + t(new Error(`modelTopology field is missing from file ${this.jsonFile.name}`)); + return; + } + if (r.weightsManifest == null) { + t(new Error(`weightManifest field is missing from file ${this.jsonFile.name}`)); + return; + } + if (this.weightsFiles.length === 0) { + e({ modelTopology: a }); + return; + } + let o = wg(r, (u) => this.loadWeights(u)); + e(o); + }, n.onerror = (s) => t(`Failed to read model topology and weights manifest JSON from file '${this.jsonFile.name}'. BrowserFiles supports loading Keras-style tf.Model artifacts only.`), n.readAsText(this.jsonFile); + }); + } + loadWeights(e) { + let t = [], n = []; + for (let a of e) + t.push(...a.weights), n.push(...a.paths); + let s = this.checkManifestAndWeightFiles(e), r = n.map((a) => this.loadWeightsFile(a, s[a])); + return Promise.all(r).then((a) => [t, xg(a)]); + } + loadWeightsFile(e, t) { + return new Promise((n, s) => { + let r = new FileReader(); + r.onload = (a) => { + let i = a.target.result; + n(i); + }, r.onerror = (a) => s(`Failed to weights data from file of path '${e}'.`), r.readAsArrayBuffer(t); + }); + } + checkManifestAndWeightFiles(e) { + let t = [], n = this.weightsFiles.map((r) => Lv(r.name)), s = {}; + for (let r of e) + r.paths.forEach((a) => { + let i = Lv(a); + if (t.indexOf(i) !== -1) + throw new Error(`Duplicate file basename found in weights manifest: '${i}'`); + if (t.push(i), n.indexOf(i) === -1) + throw new Error(`Weight file with basename '${i}' is not provided.`); + s[a] = this.weightsFiles[n.indexOf(i)]; + }); + if (t.length !== this.weightsFiles.length) + throw new Error(`Mismatch in the number of files in weights manifest (${t.length}) and the number of weight files provided (${this.weightsFiles.length}).`); + return s; + } +}; +var xA = (e) => X().getBool("IS_BROWSER") && !Array.isArray(e) && e.startsWith(cd.URL_SCHEME) ? wA(e.slice(cd.URL_SCHEME.length)) : null; +wt.registerSaveRouter(xA); +function wA(e = "model") { + return new cd(e); +} +function kA(e) { + return new vA(e); +} +function Vv(e, t, n, s) { + i(e), n = n == null ? 0 : n, s = s == null ? 1 : s, o(n, s); + let r = 0, a = (u) => (u.then((c) => { + let l = n + ++r / e.length * (s - n); + return t(l), c; + }), u); + function i(u) { + O(u != null && Array.isArray(u) && u.length > 0, () => "promises must be a none empty array"); + } + function o(u, c) { + O(u >= 0 && u <= 1, () => `Progress fraction must be in range [0, 1], but got startFraction ${u}`), O(c >= 0 && c <= 1, () => `Progress fraction must be in range [0, 1], but got endFraction ${c}`), O(c >= u, () => `startFraction must be no more than endFraction, but got startFraction ${u} and endFraction ${c}`); + } + return Promise.all(e.map(a)); +} +async function Kw(e, t) { + t == null && (t = {}); + let n = t.fetchFunc == null ? 
X().platform.fetch : t.fetchFunc, s = e.map((d) => n(d, t.requestInit, { isBinary: true })), r = 0, a = 0.5, o = (t.onProgress == null ? await Promise.all(s) : await Vv(s, t.onProgress, r, a)).map((d) => d.arrayBuffer()), u = 0.5, c = 1; + return t.onProgress == null ? await Promise.all(o) : await Vv(o, t.onProgress, u, c); +} +async function IA(e, t = "", n, s) { + return Xw((i) => Kw(i, { requestInit: s }))(e, t, n); +} +function Xw(e) { + return async (t, n = "", s) => { + let r = t.map(() => false), a = {}, i = s != null ? s.map(() => false) : [], o = []; + if (t.forEach((h, f) => { + let m = 0; + h.weights.forEach((g) => { + let b = "quantization" in g ? g.quantization.dtype : g.dtype, y = Uf[b] * pt(g.shape), v = () => { + r[f] = true, a[f] == null && (a[f] = []), a[f].push({ manifestEntry: g, groupOffset: m, sizeBytes: y }); + }; + s != null ? s.forEach((x, k) => { + x === g.name && (v(), i[k] = true); + }) : v(), o.push(g.name), m += y; + }); + }), !i.every((h) => h)) { + let h = s.filter((f, m) => !i[m]); + throw new Error(`Could not find weights in manifest with names: ${h.join(", ")}. +Manifest JSON has weights with names: ${o.join(", ")}.`); + } + let u = r.reduce((h, f, m) => (f && h.push(m), h), []), c = []; + u.forEach((h) => { + t[h].paths.forEach((f) => { + let m = n + (n.endsWith("/") ? "" : "/") + f; + c.push(m); + }); + }); + let l = await e(c), d = {}, p = 0; + return u.forEach((h) => { + let f = t[h].paths.length, m = 0; + for (let x = 0; x < f; x++) + m += l[p + x].byteLength; + let g = new ArrayBuffer(m), b = new Uint8Array(g), y = 0; + for (let x = 0; x < f; x++) { + let k = new Uint8Array(l[p + x]); + b.set(k, y), y += k.byteLength; + } + a[h].forEach((x) => { + let k = g.slice(x.groupOffset, x.groupOffset + x.sizeBytes), T = Lw(k, [x.manifestEntry]); + for (let C in T) + d[C] = T[C]; + }), p += f; + }), d; + }; +} +var SA = "application/octet-stream"; +var CA = "application/json"; +var kg = class { + constructor(e, t) { + if (this.DEFAULT_METHOD = "POST", t == null && (t = {}), this.weightPathPrefix = t.weightPathPrefix, this.onProgress = t.onProgress, this.weightUrlConverter = t.weightUrlConverter, t.fetchFunc != null ? 
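// NOTE: Vv/Kw above split weight-shard loading into two progress phases, fetch completion
// mapped onto [0, 0.5] and ArrayBuffer reads onto [0.5, 1], so onProgress always reports a
// monotonic fraction in [0, 1]. Sketch of consuming it; the URL is hypothetical:
//
//   const model = await tf.loadGraphModel('https://example.com/model.json', {
//     onProgress: (f) => console.log(`model load: ${(100 * f).toFixed(0)}%`),
//   });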
(O(typeof t.fetchFunc == "function", () => "Must pass a function that matches the signature of `fetch` (see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)"), this.fetch = t.fetchFunc) : this.fetch = X().platform.fetch, O(e != null && e.length > 0, () => "URL path for http must not be null, undefined or empty."), Array.isArray(e) && O(e.length === 2, () => `URL paths for http must have a length of 2, (actual length is ${e.length}).`), this.path = e, t.requestInit != null && t.requestInit.body != null) + throw new Error("requestInit is expected to have no pre-existing body, but has one."); + this.requestInit = t.requestInit || {}; + } + async save(e) { + if (e.modelTopology instanceof ArrayBuffer) + throw new Error("BrowserHTTPRequest.save() does not support saving model topology in binary formats yet."); + let t = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit); + t.body = new FormData(); + let n = [{ paths: ["./model.weights.bin"], weights: e.weightSpecs }], s = Bw(e, n); + t.body.append("model.json", new Blob([JSON.stringify(s)], { type: CA }), "model.json"), e.weightData != null && t.body.append("model.weights.bin", new Blob([e.weightData], { type: SA }), "model.weights.bin"); + let r = await this.fetch(this.path, t); + if (r.ok) + return { modelArtifactsInfo: Dl(e), responses: [r] }; + throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ${r.status}.`); + } + async load() { + let e = await this.fetch(this.path, this.requestInit); + if (!e.ok) + throw new Error(`Request to ${this.path} failed with status code ${e.status}. Please verify this URL points to the model JSON of the model to load.`); + let t; + try { + t = await e.json(); + } catch (r) { + let a = `Failed to parse model JSON of response from ${this.path}.`; + throw this.path.endsWith(".pb") ? a += " Your path contains a .pb file extension. Support for .pb models have been removed in TensorFlow.js 1.0 in favor of .json models. You can re-convert your Python TensorFlow model using the TensorFlow.js 1.0 conversion scripts or you can convert your.pb models with the 'pb2json'NPM script in the tensorflow/tfjs-converter repository." : a += " Please make sure the server is serving valid JSON for this request.", new Error(a); + } + let n = t.modelTopology, s = t.weightsManifest; + if (n == null && s == null) + throw new Error(`The JSON from HTTP path ${this.path} contains neither model topology or manifest for weights.`); + return wg(t, (r) => this.loadWeights(r)); + } + async loadWeights(e) { + let t = Array.isArray(this.path) ? this.path[1] : this.path, [n, s] = NA(t), r = this.weightPathPrefix || n, a = []; + for (let c of e) + a.push(...c.weights); + let i = [], o = []; + for (let c of e) + for (let l of c.paths) + this.weightUrlConverter != null ? o.push(this.weightUrlConverter(l)) : i.push(r + l + s); + this.weightUrlConverter && i.push(...await Promise.all(o)); + let u = await Kw(i, { requestInit: this.requestInit, fetchFunc: this.fetch, onProgress: this.onProgress }); + return [a, xg(u)]; + } +}; +kg.URL_SCHEME_REGEX = /^https?:\/\//; +function NA(e) { + let t = e.lastIndexOf("/"), n = e.lastIndexOf("?"), s = e.substring(0, t), r = n > t ? e.substring(n) : ""; + return [s + "/", r]; +} +function Kf(e) { + return e.match(kg.URL_SCHEME_REGEX) != null; +} +var Yw = (e, t) => { + if (typeof fetch == "undefined" && (t == null || t.fetchFunc == null)) + return null; + { + let n = true; + if (Array.isArray(e) ? 
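// NOTE: kg.save() above POSTs a multipart FormData body with exactly two parts,
// "model.json" and "model.weights.bin", and rejects a requestInit that already carries a
// body; kg.load() emits the .pb-specific hint above when the response fails to parse as
// JSON. A round-trip sketch against a hypothetical endpoint:
//
//   await model.save(tf.io.http('https://example.com/upload', {
//     requestInit: { headers: { Authorization: 'Bearer <token>' } }, // body must stay unset
//   }));
//   const back = await tf.loadLayersModel('https://example.com/models/saved/model.json');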
n = e.every((s) => Kf(s)) : n = Kf(e), n) + return Ig(e, t); + } + return null; +}; +wt.registerSaveRouter(Yw); +wt.registerLoadRouter(Yw); +function Ig(e, t) { + return new kg(e, t); +} +function TA(e, t) { + return Ig(e, t); +} +var If = class { + constructor(e) { + this.modelArtifacts = e; + } + async load() { + return this.modelArtifacts; + } +}; +var $A = class { + constructor(e) { + this.saveHandler = e; + } + async save(e) { + return this.saveHandler(e); + } +}; +function AA(e, t, n, s) { + return arguments.length === 1 ? e.modelTopology != null || e.weightSpecs != null ? new If(e) : (console.warn("Please call tf.io.fromMemory() with only one argument. The argument should be of type ModelArtifacts. The multi-argument signature of tf.io.fromMemory() has been deprecated and will be removed in a future release."), new If({ modelTopology: e })) : (console.warn("Please call tf.io.fromMemory() with only one argument. The argument should be of type ModelArtifacts. The multi-argument signature of tf.io.fromMemory() has been deprecated and will be removed in a future release."), new If({ modelTopology: e, weightSpecs: t, weightData: n, trainingConfig: s })); +} +function EA(e) { + return new $A(e); +} +var _A = {}; +Ae(_A, { confusionMatrix: () => PA }); +function RA(e, t, n = false, s = false) { + let r = E(e, "a", "matMul"), a = E(t, "b", "matMul"); + [r, a] = vt(r, a); + let i = { a: r, b: a }, o = { transposeA: n, transposeB: s }; + return L.runKernel(Sa, i, o); +} +var Me = B({ matMul_: RA }); +function DA(e, t, n = 1, s = 0) { + if (t < 2) + throw new Error(`Error in oneHot: depth must be >=2, but it is ${t}`); + let a = { indices: E(e, "indices", "oneHot", "int32") }, i = { depth: t, onValue: n, offValue: s }; + return L.runKernel(Co, a, i); +} +var dd = B({ oneHot_: DA }); +function FA(e, t) { + let n = E(e, "x", "transpose"); + if (t == null && (t = n.shape.map((a, i) => i).reverse()), O(n.rank === t.length, () => `Error in transpose: rank of input ${n.rank} must match length of perm ${t}.`), t.forEach((a) => { + O(a >= 0 && a < n.rank, () => `All entries in 'perm' must be between 0 and ${n.rank - 1} but got ${t}`); + }), n.rank <= 1) + return n.clone(); + let s = { x: n }, r = { perm: t }; + return L.runKernel(di, s, r); +} +var qe = B({ transpose_: FA }); +function OA(e, t, n) { + let s = E(e, "labels", "confusionMatrix"), r = E(t, "predictions", "confusionMatrix"); + O(n == null || n > 0 && Number.isInteger(n), () => `If provided, numClasses must be a positive integer, but got ${n}`), O(s.rank === 1, () => `Expected the rank of labels to be 1, but got ${s.rank}`), O(r.rank === 1, () => `Expected the rank of predictions to be 1, but got ${r.rank}`), O(s.shape[0] === r.shape[0], () => `Mismatch in the number of examples: ${s.shape[0]} vs. ${r.shape[0]}. 
Labels and predictions should have the same number of elements.`), O(n > 0 && Number.isInteger(n), () => `numClasses is required to be a positive integer, but got ${n}`); + let a = dd(pe(s, "int32"), n), i = dd(pe(r, "int32"), n), o = qe(a), u = Me(o, i); + return pe(u, "int32"); +} +var PA = B({ confusionMatrix_: OA }); +var Go = {}; +Ae(Go, { assertAndGetBroadcastShape: () => ot, getBroadcastDims: () => Qw, getReductionAxes: () => $t }); +function Qw(e, t) { + let n = e.length, s = []; + for (let r = 0; r < n; r++) { + let a = n - 1 - r, i = e[a] || 1; + (t[t.length - 1 - r] || 1) > 1 && i === 1 && s.unshift(a); + } + return s; +} +function $t(e, t) { + let n = []; + for (let s = 0; s < t.length; s++) { + let r = e[e.length - s - 1], a = t.length - s - 1, i = t[a]; + (r == null || r === 1 && i > 1) && n.unshift(a); + } + return n; +} +function ot(e, t) { + let n = [], s = Math.max(e.length, t.length); + for (let r = 0; r < s; r++) { + let a = e[e.length - r - 1]; + a == null && (a = 1); + let i = t[t.length - r - 1]; + if (i == null && (i = 1), a === 1) + n.unshift(i); + else if (i === 1) + n.unshift(a); + else if (a !== i) { + let o = `Operands could not be broadcast together with shapes ${e} and ${t}.`; + throw Error(o); + } else + n.unshift(a); + } + return n; +} +var Zw = {}; +Ae(Zw, { fromPixels: () => GA, fromPixelsAsync: () => WA, toPixels: () => UA }); +function zA(e, t, n) { + if (xa(e), t != null && t.length !== 3) + throw new Error("tensor3d() requires shape to have three numbers"); + let s = Es(e, n); + if (s.length !== 3 && s.length !== 1) + throw new Error("tensor3d() requires values to be number[][][] or flat/TypedArray"); + if (s.length === 1 && t == null) + throw new Error("tensor3d() requires shape to be provided when `values` are a flat array"); + return $r(e, t, s, n); +} +var Vr; +function Jw(e, t = 3) { + if (t > 4) + throw new Error("Cannot construct Tensor with more than 4 channels from pixels."); + if (e == null) + throw new Error("pixels passed to tf.browser.fromPixels() can not be null"); + let n = false, s = false, r = false, a = false, i = false, o = false; + if (e.data instanceof Uint8Array) + n = true; + else if (typeof ImageData != "undefined" && e instanceof ImageData) + s = true; + else if (typeof HTMLVideoElement != "undefined" && e instanceof HTMLVideoElement) + r = true; + else if (typeof HTMLImageElement != "undefined" && e instanceof HTMLImageElement) + a = true; + else if (e.getContext != null) + i = true; + else if (typeof ImageBitmap != "undefined" && e instanceof ImageBitmap) + o = true; + else + throw new Error(`pixels passed to tf.browser.fromPixels() must be either an HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData in browser, or OffscreenCanvas, ImageData in webworker or {data: Uint32Array, width: number, height: number}, but was ${e.constructor.name}`); + if (r && r && e.readyState < 2) + throw new Error("The video element has not loaded data yet. Please wait for `loadeddata` event on the