From c425cfe6e00744754e8ae42713d46dae7b0e7c8d Mon Sep 17 00:00:00 2001
From: Vladimir Mandic
Date: Fri, 29 Jul 2022 09:24:04 -0400
Subject: [PATCH] update build platform

---
 CHANGELOG.md               |    9 +-
 demo/faceid/index.js       |    8 -
 demo/typescript/index.js   |    8 -
 dist/human.esm-nobundle.js |    8 -
 dist/human.esm.js          |  303 --------
 dist/human.esm.js.map      |    2 +-
 dist/human.js              |  303 --------
 dist/human.node-gpu.js     |    8 -
 dist/human.node-wasm.js    |    8 -
 dist/human.node.js         |    8 -
 dist/tfjs.esm.js           |  295 --------
 package.json               |    6 +-
 test/build.log             |   48 +-
 test/test.log              | 1368 ++++++++++++++++++------------------
 14 files changed, 718 insertions(+), 1664 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 5d205e01..d7b14eda 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,11 +9,14 @@
 
 ## Changelog
 
+### **HEAD -> main** 2022/07/26 mandic00@live.com
+
+
+### **release: 2.9.1** 2022/07/25 mandic00@live.com
+
+
 ### **2.9.1** 2022/07/25 mandic00@live.com
 
-
-### **origin/main** 2022/07/23 mandic00@live.com
-
 - full rebuild
 - release cleanup
 - tflite experiments
diff --git a/demo/faceid/index.js b/demo/faceid/index.js
index c29a04e1..8beacbf6 100644
--- a/demo/faceid/index.js
+++ b/demo/faceid/index.js
@@ -6,12 +6,4 @@
 import{Human as H}from"../../dist/human.esm.js";var d,R="human",m="person",g=(...t)=>console.log("indexdb",...t);async function b(){return d?!0:new Promise(t=>{let i=indexedDB.open(R,1);i.onerror=s=>g("error:",s),i.onupgradeneeded=s=>{g("create:",s.target),d=s.target.result,d.createObjectStore(m,{keyPath:"id",autoIncrement:!0})},i.onsuccess=s=>{d=s.target.result,g("open:",d),t(!0)}})}async function C(){let t=[];return d||await b(),new Promise(i=>{let s=d.transaction([m],"readwrite").objectStore(m).openCursor(null,"next");s.onerror=o=>g("load error:",o),s.onsuccess=o=>{o.target.result?(t.push(o.target.result.value),o.target.result.continue()):i(t)}})}async function k(){return d||await b(),new Promise(t=>{let i=d.transaction([m],"readwrite").objectStore(m).count();i.onerror=s=>g("count error:",s),i.onsuccess=()=>t(i.result)})}async function x(t){d||await b();let i={name:t.name,descriptor:t.descriptor,image:t.image};d.transaction([m],"readwrite").objectStore(m).put(i),g("save:",i)}async function D(t){d||await b(),d.transaction([m],"readwrite").objectStore(m).delete(t.id),g("delete:",t)}var v={modelBasePath:"../../models",filter:{equalization:!0},face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},mobilefacenet:{enabled:!1,modelPath:"https://vladmandic.github.io/human-models/models/mobilefacenet.json"},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},I={order:2,multiplier:25,min:.2,max:.8},c={minConfidence:.6,minSize:224,maxTime:1e4,blinkMin:10,blinkMax:800,threshold:.5,mask:v.face.detector.mask,rotation:v.face.detector.rotation,cropFactor:v.face.detector.cropFactor,...I},n={faceCount:!1,faceConfidence:!1,facingCenter:!1,lookingCenter:!1,blinkDetected:!1,faceSize:!1,antispoofCheck:!1,livenessCheck:!1,elapsedMs:0},M=()=>n.faceCount&&n.faceSize&&n.blinkDetected&&n.facingCenter&&n.lookingCenter&&n.faceConfidence&&n.antispoofCheck&&n.livenessCheck,r={face:null,record:null},l={start:0,end:0,time:0},a=new H(v);a.env.perfadd=!1;a.draw.options.font='small-caps 18px "Lato"';a.draw.options.lineHeight=20;var 
e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},h={detect:0,draw:0},y={detect:0,draw:0},E=0,p=(...t)=>{e.log.innerText+=t.join(" ")+` `,console.log(...t)},w=t=>e.fps.innerText=t;async function S(){w("starting webcam...");let t={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},i=await navigator.mediaDevices.getUserMedia(t),s=new Promise(o=>{e.video.onloadeddata=()=>o(!0)});e.video.srcObject=i,e.video.play(),await s,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight,a.env.initial&&p("video:",e.video.videoWidth,e.video.videoHeight,"|",i.getVideoTracks()[0].label),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function T(){if(!e.video.paused){r.face&&r.face.tensor&&a.tf.dispose(r.face.tensor),await a.detect(e.video);let t=a.now();y.detect=1e3/(t-h.detect),h.detect=t,requestAnimationFrame(T)}}async function L(){let t=await a.next(a.result);await a.draw.canvas(e.video,e.canvas),await a.draw.all(e.canvas,t);let i=a.now();if(y.draw=1e3/(i-h.draw),h.draw=i,w(`fps: ${y.detect.toFixed(1).padStart(5," ")} detect | ${y.draw.toFixed(1).padStart(5," ")} draw`),n.faceCount=a.result.face.length===1,n.faceCount){let o=Object.values(a.result.gesture).map(f=>f.gesture);(o.includes("blink left eye")||o.includes("blink right eye"))&&(l.start=a.now()),l.start>0&&!o.includes("blink left eye")&&!o.includes("blink right eye")&&(l.end=a.now()),n.blinkDetected=n.blinkDetected||Math.abs(l.end-l.start)>c.blinkMin&&Math.abs(l.end-l.start)c.minConfidence&&(a.result.face[0].faceScore||0)>c.minConfidence&&(a.result.face[0].genderScore||0)>c.minConfidence,n.antispoofCheck=(a.result.face[0].real||0)>c.minConfidence,n.livenessCheck=(a.result.face[0].live||0)>c.minConfidence,n.faceSize=a.result.face[0].box[2]>=c.minSize&&a.result.face[0].box[3]>=c.minSize}let s=32;for(let[o,f]of Object.entries(n)){let u=document.getElementById(`ok-${o}`);u||(u=document.createElement("div"),u.innerText=o,u.className="ok",u.style.top=`${s}px`,e.ok.appendChild(u)),typeof f=="boolean"?u.style.backgroundColor=f?"lightgreen":"lightcoral":u.innerText=`${o}:${f}`,s+=28}return M()||n.elapsedMs>c.maxTime?(e.video.pause(),a.result.face[0]):(n.elapsedMs=Math.trunc(a.now()-E),new Promise(o=>{setTimeout(async()=>{await L()&&o(a.result.face[0])},30)}))}async function P(){var t,i;if(e.name.value.length>0){let s=(t=e.canvas.getContext("2d"))==null?void 0:t.getImageData(0,0,e.canvas.width,e.canvas.height),o={id:0,name:e.name.value,descriptor:(i=r.face)==null?void 0:i.embedding,image:s};await x(o),p("saved face record:",o.name)}else p("invalid name")}async function z(){r.record&&r.record.id>0&&await D(r.record)}async function j(){var o,f;if((o=e.canvas.getContext("2d"))==null||o.clearRect(0,0,c.minSize,c.minSize),!r.face||!r.face.tensor||!r.face.embedding)return!1;if(console.log("face record:",r.face),a.tf.browser.toPixels(r.face.tensor,e.canvas),await k()===0)return p("face database is empty"),document.body.style.background="black",e.delete.style.display="none",!1;let t=await C(),i=t.map(u=>u.descriptor),s=await a.match(r.face.embedding,i,I);return r.record=t[s.index]||null,r.record&&(p(`best match: 
${r.record.name} | id: ${r.record.id} | similarity: ${Math.round(1e3*s.similarity)/10}%`),e.name.value=r.record.name,e.source.style.display="",(f=e.source.getContext("2d"))==null||f.putImageData(r.record.image,0,0)),document.body.style.background=s.similarity>c.threshold?"darkgreen":"maroon",s.similarity>c.threshold}async function B(){var t,i,s,o;return n.faceCount=!1,n.faceConfidence=!1,n.facingCenter=!1,n.blinkDetected=!1,n.faceSize=!1,n.antispoofCheck=!1,n.livenessCheck=!1,n.elapsedMs=0,e.match.style.display="none",e.retry.style.display="none",e.source.style.display="none",document.body.style.background="black",await S(),await T(),E=a.now(),r.face=await L(),e.canvas.width=((i=(t=r.face)==null?void 0:t.tensor)==null?void 0:i.shape[1])||c.minSize,e.canvas.height=((o=(s=r.face)==null?void 0:s.tensor)==null?void 0:o.shape[0])||c.minSize,e.source.width=e.canvas.width,e.source.height=e.canvas.height,e.canvas.style.width="",e.match.style.display="flex",e.save.style.display="flex",e.delete.style.display="flex",e.retry.style.display="block",M()?j():(p("did not find valid face"),!1)}async function q(){p("human version:",a.version,"| tfjs version:",a.tf.version["tfjs-core"]),p("options:",JSON.stringify(c).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),w("loading..."),p("known face records:",await k()),await S(),await a.load(),w("initializing..."),e.retry.addEventListener("click",B),e.save.addEventListener("click",P),e.delete.addEventListener("click",z),await a.warmup(),await B()}window.onload=q; -/** - * Human demo for browsers - * @default Human Library - * @summary - * @author - * @copyright - * @license MIT - */ //# sourceMappingURL=index.js.map diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 60a01fe8..f01ad6bd 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -6,12 +6,4 @@ import{Human as p}from"../../dist/human.esm.js";var w={async:!0,modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},o={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+` `,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth},height:{ideal:document.body.clientHeight}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(u=>{e.video.onloadeddata=()=>u(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let i=n.getVideoTracks()[0],f=i.getCapabilities?i.getCapabilities():"",v=i.getSettings?i.getSettings():"",g=i.getConstraints?i.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,i.label,{stream:n,track:i,settings:v,constraints:g,capabilities:f}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function 
c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-o.tensors!==0&&s("allocated tensors:",n-o.tensors),o.tensors=n}let a=t.now();d.detect=1e3/(a-o.detect),o.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);t.config.filter.flip?await t.draw.canvas(n.canvas,e.canvas):await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-o.draw),o.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("models stats:",t.getModelStats()),s("models loaded:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y; -/** - * Human demo for browsers - * @default Human Library - * @summary - * @author - * @copyright - * @license MIT - */ //# sourceMappingURL=index.js.map diff --git a/dist/human.esm-nobundle.js b/dist/human.esm-nobundle.js index a88ff3e1..a791ccf9 100644 --- a/dist/human.esm-nobundle.js +++ b/dist/human.esm-nobundle.js @@ -828,12 +828,4 @@ BAkBAkBAkBAlACEgMZjdjbFW8bWrEx8YWANb6Fp+bfwab+vLDKMFK9qxH5L0bAr8OPRPKz2AY7J2 SbAjYZAI2E7AIEgIEgIEgMdkSy2NgY7MdlmyNoBXsxmFuyNgVTVjNV3KjlBRNTlXTVHKCrlIqt5T lBhEMohlFerLlBjEMohMVTEARDKCITsAk2AEgAAAkAAAAAAAAAAAAAAAAAAAAAAAASAAAAAAAAD/ 2Q==`;async function bA(e){let t=(r,s="application/octet-stream")=>fetch(`data:${s};base64,${r}`).then(a=>a.blob()),o,n;switch(e.config.warmup){case"face":o=await t(U2);break;case"body":case"full":o=await t(Y2);break;default:o=null}if(o){let r=await createImageBitmap(o);n=await e.detect(r,e.config),r.close()}return n}async function gA(e){return new Promise(t=>{let o;switch(e.config.warmup){case"face":o="data:image/jpeg;base64,"+U2;break;case"full":case"body":o="data:image/jpeg;base64,"+Y2;break;default:o=null}let n;if(typeof Image!="undefined")n=new Image;else if(T.Image)n=new T.Image;else return;n.onload=async()=>{let r=y0(n.naturalWidth,n.naturalHeight);if(!r)u("Warmup: Canvas not found"),t(void 0);else{let s=r.getContext("2d");s&&s.drawImage(n,0,0);let a=await e.image(r),i=await e.detect(a.tensor,e.config);t(i)}},o?n.src=o:t(void 0)})}async function MA(e){let t=r=>Buffer.from(r,"base64"),o;e.config.warmup==="face"?o=t(U2):o=t(Y2);let n;if("node"in A){let r=A.node.decodeJpeg(o),s=r.expandDims(0);e.tf.dispose(r),n=await e.detect(s,e.config),e.tf.dispose(s)}else e.config.debug&&u("Warmup tfjs-node not loaded");return n}async function PA(e){let t;return typeof createImageBitmap=="function"?t=await bA(e):typeof Image!="undefined"||T.Canvas!==void 0?t=await gA(e):t=await MA(e),t}async function RA(e){let t=A.getBackend(),o=A.backend();if(t!=="webgl"&&t!=="humangl"||!o||!o.checkCompileCompletion)return;A.env().set("ENGINE_COMPILE_ONLY",!0);let n=A.engine().state.numTensors,r=[];for(let[i,l]of Object.entries(e).filter(([c,d])=>c!==null&&d!==null)){let c=l.inputs&&l.inputs[0]&&l.inputs[0].shape?[...l.inputs[0].shape]:[1,64,64,3],d=l.inputs&&l.inputs[0]&&l.inputs[0].dtype?l.inputs[0].dtype:"float32";for(let x=0;xA.dispose(m)):A.dispose(x)}catch(x){u("compile fail model:",i)}A.dispose(y)}let s=await o.checkCompileCompletionAsync();o.getUniformLocations(),u("compile pass models:",r),u("compile pass 
kernels:",s.length),A.env().set("ENGINE_COMPILE_ONLY",!1);let a=A.engine().state.numTensors;a-n>0&&u("tensor leak:",a-n)}async function go(e,t){let o=b();return e.state="warmup",t&&(e.config=J(e.config,t)),!e.config.warmup||e.config.warmup.length===0||e.config.warmup==="none"?{face:[],body:[],hand:[],gesture:[],object:[],performance:e.performance,timestamp:b(),persons:[],error:null}:new Promise(async n=>{await RA(e.models);let r=await PA(e),s=b();e.config.debug&&u("warmup",e.config.warmup,Math.round(s-o),"ms"),e.emit("warmup"),n(r)})}var Ve,s2,a2,K2,Mo=class{constructor(t){w(this,"version");w(this,"config");w(this,"result");w(this,"state");w(this,"process");w(this,"tf");w(this,"env");w(this,"draw");w(this,"models");w(this,"events");w(this,"faceTriangulation");w(this,"faceUVMap");w(this,"performance");qe(this,Ve,void 0);qe(this,s2,void 0);qe(this,a2,void 0);w(this,"gl");w(this,"analyze",(...t)=>{if(!Xe(this,s2))return;let o=this.tf.engine().state.numTensors,n=Xe(this,Ve);Ue(this,Ve,o);let r=o-n;r!==0&&u(...t,r)});qe(this,K2,t=>{if(!Xe(this,a2))return null;if(!t)return"input is not defined";if(this.env.node&&!(t instanceof ge))return"input must be a tensor";try{this.tf.getBackend()}catch(o){return"backend not loaded"}return null});w(this,"similarity",k1);w(this,"distance",A2);w(this,"match",E1);w(this,"emit",t=>{var o;this.events&&this.events.dispatchEvent&&((o=this.events)==null||o.dispatchEvent(new Event(t)))});var n;this.env=T;let o=(((n=Ye)==null?void 0:n.tfjs)||A.version_core).replace(/-(.*)/,"");se.wasmPath=`https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@${o}/dist/`,se.modelBasePath=T.browser?"../models/":"file://models/",se.backend=T.browser?"humangl":"tensorflow",this.version=n5,Object.defineProperty(this,"version",{value:n5}),this.config=JSON.parse(JSON.stringify(se)),Object.seal(this.config),this.config.cacheModels=typeof indexedDB!="undefined",t&&(this.config=J(this.config,t)),X1(this.config),this.tf=A,this.state="idle",Ue(this,Ve,0),Ue(this,s2,!1),Ue(this,a2,!1),this.performance={},this.events=typeof EventTarget!="undefined"?new EventTarget:void 0,this.models=new r2,this.draw={options:l0,canvas:(r,s)=>P1(r,s),face:(r,s,a)=>Le(r,s,a),body:(r,s,a)=>We(r,s,a),hand:(r,s,a)=>Fe(r,s,a),gesture:(r,s,a)=>Be(r,s,a),object:(r,s,a)=>Ge(r,s,a),person:(r,s,a)=>M1(r,s,a),all:(r,s,a)=>R1(r,s,a)},this.result={face:[],body:[],hand:[],gesture:[],object:[],performance:{},timestamp:0,persons:[],error:null},this.process={tensor:null,canvas:null},this.faceTriangulation=n3,this.faceUVMap=r3,this.gl=Y,this.emit("create")}reset(){let t=this.config.backend;this.config=JSON.parse(JSON.stringify(se)),this.config.backend=t}validate(t){return Q2(se,t||this.config)}now(){return b()}image(t,o=!0){return Pe(t,this.config,o)}async segmentation(t,o){return no(t,o,this.config)}enhance(t){return G5(t)}compare(t,o){return Z1(this.config,t,o)}async init(){await q2(this,!0),await this.tf.ready()}async load(t){this.state="load";let o=b(),n=Object.values(this.models).filter(a=>a).length;t&&(this.config=J(this.config,t)),this.env.initial&&(this.config.debug&&u(`version: ${this.version}`),this.config.debug&&u(`tfjs version: ${this.tf.version["tfjs-core"]}`),await q2(this)||u("error: backend check failed"),await A.ready(),this.env.browser&&(this.config.debug&&u("configuration:",this.config),this.config.debug&&u("environment:",this.env),this.config.debug&&u("tf flags:",this.tf.ENV.flags))),await m1(this),this.env.initial&&this.config.debug&&u("tf engine 
state:",this.tf.engine().state.numBytes,"bytes",this.tf.engine().state.numTensors,"tensors"),this.env.initial=!1,Object.values(this.models).filter(a=>a).length!==n&&(await p1(this),this.emit("load"));let s=Math.trunc(b()-o);s>(this.performance.loadModels||0)&&(this.performance.loadModels=this.env.perfadd?(this.performance.loadModels||0)+s:s)}next(t=this.result){return po(t,this.config)}getModelStats(){return f1(this)}async warmup(t){let o=b(),n=await go(this,t),r=b();return this.performance.warmup=Math.trunc(r-o),n}async profile(t,o){let n=await this.tf.profile(()=>this.detect(t,o)),r={};for(let i of n.kernels)r[i.name]?r[i.name]+=i.kernelTimeMs:r[i.name]=i.kernelTimeMs;let s=[];Object.entries(r).forEach(i=>s.push({name:i[0],ms:i[1]})),s.sort((i,l)=>l.ms-i.ms),s.length=20;let a={};for(let i of s)a[i.name]=i.ms;return a}async detect(t,o){return this.state="detect",new Promise(async n=>{var R,M,g,f,P,C,k,I,j,O,F,V,Z,t0,z,g0,M0,v,o0,E,X,_;this.state="config";let r;this.config=J(this.config,o),this.state="check";let s=Xe(this,K2).call(this,t);s&&(u(s,t),this.emit("error"),n({face:[],body:[],hand:[],gesture:[],object:[],performance:this.performance,timestamp:b(),persons:[],error:s}));let a=b();await q2(this),await this.load(),r=b(),this.state="image";let i=await Pe(t,this.config);if(this.process=i,this.performance.inputProcess=this.env.perfadd?(this.performance.inputProcess||0)+Math.trunc(b()-r):Math.trunc(b()-r),this.analyze("Get Image:"),!i.tensor){this.config.debug&&u("could not convert input to tensor"),this.emit("error"),n({face:[],body:[],hand:[],gesture:[],object:[],performance:this.performance,timestamp:b(),persons:[],error:"could not convert input to tensor"});return}this.emit("image"),r=b(),this.config.skipAllowed=await D1(this.config,i.tensor),this.performance.totalFrames||(this.performance.totalFrames=0),this.performance.cachedFrames||(this.performance.cachedFrames=0),this.performance.totalFrames++,this.config.skipAllowed&&this.performance.cachedFrames++,this.performance.cacheCheck=this.env.perfadd?(this.performance.cacheCheck||0)+Math.trunc(b()-r):Math.trunc(b()-r),this.analyze("Check Changed:");let l=[],c=[],d=[],y=[];this.state="detect:face",this.config.async?(l=this.config.face.enabled?T1(this,i.tensor):[],this.performance.face&&delete this.performance.face):(r=b(),l=this.config.face.enabled?await T1(this,i.tensor):[],this.performance.face=this.env.perfadd?(this.performance.face||0)+Math.trunc(b()-r):Math.trunc(b()-r)),this.config.async&&(this.config.body.maxDetected===-1||this.config.hand.maxDetected===-1)&&(l=await l),this.analyze("Start Body:"),this.state="detect:body";let x=this.config.body.maxDetected===-1?J(this.config,{body:{maxDetected:this.config.face.enabled?1*l.length:1}}):this.config;this.config.async?((R=this.config.body.modelPath)!=null&&R.includes("posenet")?c=this.config.body.enabled?x1(i.tensor,x):[]:(M=this.config.body.modelPath)!=null&&M.includes("blazepose")?c=this.config.body.enabled?R5(i.tensor,x):[]:(g=this.config.body.modelPath)!=null&&g.includes("efficientpose")?c=this.config.body.enabled?S5(i.tensor,x):[]:(f=this.config.body.modelPath)!=null&&f.includes("movenet")&&(c=this.config.body.enabled?r1(i.tensor,x):[]),this.performance.body&&delete this.performance.body):(r=b(),(P=this.config.body.modelPath)!=null&&P.includes("posenet")?c=this.config.body.enabled?await x1(i.tensor,x):[]:(C=this.config.body.modelPath)!=null&&C.includes("blazepose")?c=this.config.body.enabled?await 
R5(i.tensor,x):[]:(k=this.config.body.modelPath)!=null&&k.includes("efficientpose")?c=this.config.body.enabled?await S5(i.tensor,x):[]:(I=this.config.body.modelPath)!=null&&I.includes("movenet")&&(c=this.config.body.enabled?await r1(i.tensor,x):[]),this.performance.body=this.env.perfadd?(this.performance.body||0)+Math.trunc(b()-r):Math.trunc(b()-r)),this.analyze("End Body:"),this.analyze("Start Hand:"),this.state="detect:hand";let m=this.config.hand.maxDetected===-1?J(this.config,{hand:{maxDetected:this.config.face.enabled?2*l.length:1}}):this.config;this.config.async?((O=(j=this.config.hand.detector)==null?void 0:j.modelPath)!=null&&O.includes("handdetect")?d=this.config.hand.enabled?q5(i.tensor,m):[]:(V=(F=this.config.hand.detector)==null?void 0:F.modelPath)!=null&&V.includes("handtrack")&&(d=this.config.hand.enabled?J5(i.tensor,m):[]),this.performance.hand&&delete this.performance.hand):(r=b(),(t0=(Z=this.config.hand.detector)==null?void 0:Z.modelPath)!=null&&t0.includes("handdetect")?d=this.config.hand.enabled?await q5(i.tensor,m):[]:(g0=(z=this.config.hand.detector)==null?void 0:z.modelPath)!=null&&g0.includes("handtrack")&&(d=this.config.hand.enabled?await J5(i.tensor,m):[]),this.performance.hand=this.env.perfadd?(this.performance.hand||0)+Math.trunc(b()-r):Math.trunc(b()-r)),this.analyze("End Hand:"),this.analyze("Start Object:"),this.state="detect:object",this.config.async?((M0=this.config.object.modelPath)!=null&&M0.includes("nanodet")?y=this.config.object.enabled?s1(i.tensor,this.config):[]:(v=this.config.object.modelPath)!=null&&v.includes("centernet")&&(y=this.config.object.enabled?w5(i.tensor,this.config):[]),this.performance.object&&delete this.performance.object):(r=b(),(o0=this.config.object.modelPath)!=null&&o0.includes("nanodet")?y=this.config.object.enabled?await s1(i.tensor,this.config):[]:(E=this.config.object.modelPath)!=null&&E.includes("centernet")&&(y=this.config.object.enabled?await w5(i.tensor,this.config):[]),this.performance.object=this.env.perfadd?(this.performance.object||0)+Math.trunc(b()-r):Math.trunc(b()-r)),this.analyze("End Object:"),this.state="detect:await",this.config.async&&([l,c,d,y]=await Promise.all([l,c,d,y])),this.state="detect:gesture";let h=[];this.config.gesture.enabled&&(r=b(),h=[...co(l),...xo(c),...mo(d),...fo(l)],this.config.async?this.performance.gesture&&delete this.performance.gesture:this.performance.gesture=this.env.perfadd?(this.performance.gesture||0)+Math.trunc(b()-r):Math.trunc(b()-r)),this.performance.total=this.env.perfadd?(this.performance.total||0)+Math.trunc(b()-a):Math.trunc(b()-a);let p=((_=(X=this.process)==null?void 0:X.tensor)==null?void 0:_.shape)||[];this.result={face:l,body:c,hand:d,gesture:h,object:y,performance:this.performance,canvas:this.process.canvas,timestamp:Date.now(),error:null,get persons(){return bo(l,c,d,h,p)}},A.dispose(i.tensor),this.emit("detect"),this.state="idle",n(this.result)})}};Ve=new WeakMap,s2=new WeakMap,a2=new WeakMap,K2=new WeakMap;export{Mo as Human,Mo as default,se as defaults,io as draw,T as env,ho as match,u1 as models}; -/** - * Human main module - * @default Human Library - * @summary - * @author - * @copyright - * @license MIT - */ //# sourceMappingURL=human.esm-nobundle.js.map diff --git a/dist/human.esm.js b/dist/human.esm.js index 70fcbeb8..8dcca3cd 100644 --- a/dist/human.esm.js +++ b/dist/human.esm.js @@ -86632,307 +86632,4 @@ export { match_exports as match, models_exports2 as models }; -/** - * @license - * Copyright 2017 Google LLC. All Rights Reserved. 
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2018 Google LLC - * - * Use of this source code is governed by an MIT-style - * license that can be found in the LICENSE file or at - * https://opensource.org/licenses/MIT. - * ============================================================================= - */ -/** - * @license - * Copyright 2018 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * ============================================================================= - */ -/** - * @license - * Copyright 2018 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2019 Google LLC - * - * Use of this source code is governed by an MIT-style - * license that can be found in the LICENSE file or at - * https://opensource.org/licenses/MIT. - * ============================================================================= - */ -/** - * @license - * Copyright 2019 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * - * ============================================================================= - */ -/** - * @license - * Copyright 2019 Google LLC. All Rights Reserved. 
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2019 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2020 Google Inc. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2020 Google LLC - * - * Use of this source code is governed by an MIT-style - * license that can be found in the LICENSE file or at - * https://opensource.org/licenses/MIT. - * ============================================================================= - */ -/** - * @license - * Copyright 2020 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use backend file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2020 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. 
- * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2020 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the License); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2021 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2021 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * https://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2021 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the License); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an AS IS BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2022 Google Inc. All Rights Reserved. 
- * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2022 Google LLC - * - * Use of this source code is governed by an MIT-style - * license that can be found in the LICENSE file or at - * https://opensource.org/licenses/MIT. - * ============================================================================= - */ -/** - * @license - * Copyright 2022 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the "License"); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an "AS IS" BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * @license - * Copyright 2022 Google LLC. All Rights Reserved. - * Licensed under the Apache License, Version 2.0 (the 'License'); - * you may not use this file except in compliance with the License. - * You may obtain a copy of the License at - * - * http://www.apache.org/licenses/LICENSE-2.0 - * - * Unless required by applicable law or agreed to in writing, software - * distributed under the License is distributed on an 'AS IS' BASIS, - * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - * See the License for the specific language governing permissions and - * limitations under the License. - * ============================================================================= - */ -/** - * Human main module - * @default Human Library - * @summary - * @author - * @copyright - * @license MIT - */ -/** @license See the LICENSE file. 
*/ //# sourceMappingURL=human.esm.js.map diff --git a/dist/human.esm.js.map b/dist/human.esm.js.map index 8411c583..251d326b 100644 --- a/dist/human.esm.js.map +++ b/dist/human.esm.js.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["../src/util/util.ts", "../src/config.ts", "tfjs.esm.js", "../src/image/imagefxshaders.ts", "../src/image/imagefx.ts", "../src/image/enhance.ts", "../src/image/image.ts", "../src/util/env.ts", "../src/tfjs/load.ts", "../src/models.ts", "../src/gear/gear.ts", "../src/tfjs/constants.ts", "../src/gear/ssrnet-age.ts", "../src/gear/ssrnet-gender.ts", "../src/face/antispoof.ts", "../src/face/facemeshcoords.ts", "../src/face/facemeshutil.ts", "../src/face/blazeface.ts", "../src/body/blazeposecoords.ts", "../src/body/blazeposedetector.ts", "../src/util/box.ts", "../src/body/blazepose.ts", "../src/object/labels.ts", "../src/object/centernet.ts", "../src/body/efficientposecoords.ts", "../src/body/efficientpose.ts", "../src/gear/emotion.ts", "../src/face/mobilefacenet.ts", "../src/face/iris.ts", "../src/face/constants.ts", "../src/face/attention.ts", "../src/face/facemesh.ts", "../src/face/faceres.ts", "../src/hand/handposeutil.ts", "../src/hand/handposeanchors.ts", "../src/hand/handposedetector.ts", "../src/hand/handposepipeline.ts", "../src/hand/fingerdef.ts", "../src/hand/fingergesture.ts", "../src/hand/fingerpose.ts", "../src/hand/handpose.ts", "../src/hand/handtrack.ts", "../src/face/liveness.ts", "../src/body/movenetcoords.ts", "../src/body/movenetfix.ts", "../src/body/movenet.ts", "../src/object/nanodet.ts", "../src/body/posenetutils.ts", "../src/body/posenet.ts", "../src/segmentation/segmentation.ts", "../src/tfjs/humangl.ts", "../src/tfjs/backend.ts", "../src/draw/draw.ts", "../src/draw/primitives.ts", "../src/draw/options.ts", "../src/draw/face.ts", "../src/draw/body.ts", "../src/draw/hand.ts", "../src/draw/object.ts", "../src/draw/gesture.ts", "../src/face/mask.ts", "../src/face/angles.ts", "../src/face/face.ts", "../src/gesture/gesture.ts", "../src/util/interpolate.ts", "../src/face/match.ts", "../src/util/persons.ts", "../src/sample.ts", "../src/warmup.ts", "../src/human.ts"], - "sourcesContent": ["import type { Config } from '../exports';\n\n/**\n * Simple helper functions used accross codebase\n */\n\n// helper function: wrapper around console output\nexport function log(...msg): void {\n const dt = new Date();\n const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`;\n // eslint-disable-next-line no-console\n if (msg) console.log(ts, 'Human:', ...msg);\n}\n\n// helper function: join two paths\nexport function join(folder: string, file: string): string {\n const separator = folder.endsWith('/') ? '' : '/';\n const skipJoin = file.startsWith('.') || file.startsWith('/') || file.startsWith('http:') || file.startsWith('https:') || file.startsWith('file:');\n const path = skipJoin ? 
`${file}` : `${folder}${separator}${file}`;\n if (!path.toLocaleLowerCase().includes('.json')) throw new Error(`modelpath error: expecting json file: ${path}`);\n return path;\n}\n\n// helper function: gets elapsed time on both browser and nodejs\nexport const now = () => {\n if (typeof performance !== 'undefined') return performance.now();\n return parseInt((Number(process.hrtime.bigint()) / 1000 / 1000).toString());\n};\n\n// helper function: checks current config validity\nexport function validate(defaults: Partial, config: Partial, parent = 'config', msgs: Array<{ reason: string, where: string, expected?: string }> = []) {\n for (const key of Object.keys(config)) {\n if (typeof config[key] === 'object') {\n validate(defaults[key], config[key], key, msgs);\n } else {\n const defined = defaults && (typeof defaults[key] !== 'undefined');\n if (!defined) msgs.push({ reason: 'unknown property', where: `${parent}.${key} = ${config[key]}` });\n const same = defaults && typeof defaults[key] === typeof config[key];\n if (defined && !same) msgs.push({ reason: 'property type mismatch', where: `${parent}.${key} = ${config[key]}`, expected: typeof defaults[key] });\n }\n // ok = ok && defined && same;\n }\n if (config.debug && parent === 'config' && msgs.length > 0) log('invalid configuration', msgs);\n return msgs;\n}\n\n// helper function: perform deep merge of multiple objects so it allows full inheritance with overrides\nexport function mergeDeep(...objects) {\n const isObject = (obj) => obj && typeof obj === 'object';\n return objects.reduce((prev, obj) => {\n Object.keys(obj || {}).forEach((key) => {\n const pVal = prev[key];\n const oVal = obj[key];\n if (Array.isArray(pVal) && Array.isArray(oVal)) prev[key] = pVal.concat(...oVal);\n else if (isObject(pVal) && isObject(oVal)) prev[key] = mergeDeep(pVal, oVal);\n else prev[key] = oVal;\n });\n return prev;\n }, {});\n}\n\n// helper function: return min and max from input array\nexport const minmax = (data: Array) => data.reduce((acc: Array, val) => {\n acc[0] = (acc[0] === undefined || val < acc[0]) ? val : acc[0];\n acc[1] = (acc[1] === undefined || val > acc[1]) ? val : acc[1];\n return acc;\n}, []);\n\n// helper function: async wait\nexport async function wait(time: number) {\n const waiting = new Promise((resolve) => { setTimeout(() => resolve(true), time); });\n await waiting;\n}\n", "/* eslint-disable indent */\n/* eslint-disable no-multi-spaces */\n\n/** Generic config type inherited by all module types */\nexport interface GenericConfig {\n /** is module enabled? 
*/\n enabled: boolean,\n /** path to model json file (relative to `modelBasePath` */\n modelPath: string,\n /** how many max frames to go without re-running model if cached results are acceptable\n * for two-phase models such as face and hand caching applies to bounding boxes detection only */\n skipFrames: number,\n /** how many max milliseconds to go without re-running model if cached results are acceptable\n * for two-phase models such as face and hand caching applies to bounding boxes detection only */\n skipTime: number,\n}\n\n/** Detector part of face configuration */\nexport interface FaceDetectorConfig extends GenericConfig {\n /** is face rotation correction performed after detecting face?\n * used to correctly analyze faces under high angles\n */\n rotation: boolean,\n /** maximum number of detected faces */\n maxDetected: number,\n /** minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n /** minimum overlap between two detected faces before one is discarded */\n iouThreshold: number,\n /** should child models perform on masked image of a face */\n mask: boolean,\n /** should face detection return processed and cropped face tensor that can with an external model for addtional processing?\n * if enabled it must be manually deallocated to avoid memory leak */\n return: boolean,\n}\n\n/** Mesh part of face configuration */\nexport interface FaceMeshConfig extends GenericConfig {\n /** Keep detected faces that cannot be verified using facemesh */\n keepInvalid: boolean\n}\n\n/** Iris part of face configuration */\nexport interface FaceIrisConfig extends GenericConfig {}\n\n/** Attention part of face configuration */\nexport interface FaceAttentionConfig extends GenericConfig {}\n\n/** Description or face embedding part of face configuration\n * - also used by age and gender detection\n */\nexport interface FaceDescriptionConfig extends GenericConfig {\n /** minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n}\n\n/** Emotion part of face configuration */\nexport interface FaceEmotionConfig extends GenericConfig {\n /** minimum confidence for a detected face before results are discarded */\n minConfidence: number,\n}\n\n/** Anti-spoofing part of face configuration */\nexport interface FaceAntiSpoofConfig extends GenericConfig {}\n\n/** Liveness part of face configuration */\nexport interface FaceLivenessConfig extends GenericConfig {}\n\n/** Gear part of face configuration */\nexport interface FaceGearConfig extends GenericConfig {\n /** minimum confidence for a detected race before results are discarded */\n minConfidence: number,\n}\n\n/** Configures all face-specific options: face detection, mesh analysis, age, gender, emotion detection and face description */\nexport interface FaceConfig extends GenericConfig {\n detector: Partial,\n mesh: Partial,\n attention: Partial,\n iris: Partial,\n description: Partial,\n emotion: Partial,\n antispoof: Partial,\n liveness: Partial,\n gear: Partial,\n}\n\n/** Configures all body detection specific options */\nexport interface BodyConfig extends GenericConfig {\n /** maximum number of detected bodies */\n maxDetected: number,\n /** minimum confidence for a detected body before results are discarded */\n minConfidence: number,\n /* experimental\n /** experimental: detector used for body model before actual analysis\n detector?: {\n /** experimental: enable body detector before body landmarks\n enabled: boolean,\n /** experimental: path to optional body 
detector model json file\n modelPath: string,\n /** experimental: minimum confidence for a detected body before results are discarded\n minConfidence: number,\n /** experimental: minimum overlap between two detected bodies before one is discarded\n iouThreshold: number\n },\n */\n}\n\n/** Configures all hand detection specific options */\nexport interface HandConfig extends GenericConfig {\n /** should hand rotation correction be performed after hand detection? */\n rotation: boolean,\n /** minimum confidence for a detected hand before results are discarded */\n minConfidence: number,\n /** minimum overlap between two detected hands before one is discarded */\n iouThreshold: number,\n /** maximum number of detected hands */\n maxDetected: number,\n /** should hand landmarks be detected or just return detected hand box */\n landmarks: boolean,\n detector: {\n /** path to hand detector model json */\n modelPath?: string,\n },\n skeleton: {\n /** path to hand skeleton model json */\n modelPath?: string,\n },\n}\n\n/** Configures all object detection specific options */\nexport interface ObjectConfig extends GenericConfig {\n /** minimum confidence for a detected objects before results are discarded */\n minConfidence: number,\n /** minimum overlap between two detected objects before one is discarded */\n iouThreshold: number,\n /** maximum number of detected objects */\n maxDetected: number,\n}\n\n/** Configures all body segmentation module\n * removes background from input containing person\n * if segmentation is enabled it will run as preprocessing task before any other model\n * alternatively leave it disabled and use it on-demand using human.segmentation method which can\n * remove background or replace it with user-provided background\n*/\nexport interface SegmentationConfig extends GenericConfig {\n /** blur segmentation output by pixels for more realistic image */\n blur: number,\n}\n\n/** Run input through image filters before inference\n * - available only in Browser environments\n * - image filters run with near-zero latency as they are executed on the GPU using WebGL\n*/\nexport interface FilterConfig {\n /** are image filters enabled? 
*/\n enabled: boolean,\n /** perform image histogram equalization\n * - equalization is performed on input as a whole and detected face before its passed for further analysis\n */\n equalization: boolean,\n /** resize input width\n * - if both width and height are set to 0, there is no resizing\n * - if just one is set, second one is scaled automatically\n * - if both are set, values are used as-is\n */\n width: number,\n /** resize input height\n * - if both width and height are set to 0, there is no resizing\n * - if just one is set, second one is scaled automatically\n * - if both are set, values are used as-is\n */\n height: number,\n /** return processed canvas imagedata in result */\n return: boolean,\n /** flip input as mirror image */\n flip: boolean,\n /** range: -1 (darken) to 1 (lighten) */\n brightness: number,\n /** range: -1 (reduce contrast) to 1 (increase contrast) */\n contrast: number,\n /** range: 0 (no sharpening) to 1 (maximum sharpening) */\n sharpness: number,\n /** range: 0 (no blur) to N (blur radius in pixels) */\n blur: number\n /** range: -1 (reduce saturation) to 1 (increase saturation) */\n saturation: number,\n /** range: 0 (no change) to 360 (hue rotation in degrees) */\n hue: number,\n /** image negative */\n negative: boolean,\n /** image sepia colors */\n sepia: boolean,\n /** image vintage colors */\n vintage: boolean,\n /** image kodachrome colors */\n kodachrome: boolean,\n /** image technicolor colors */\n technicolor: boolean,\n /** image polaroid camera effect */\n polaroid: boolean,\n /** range: 0 (no pixelate) to N (number of pixels to pixelate) */\n pixelate: number,\n}\n\n/** Controlls gesture detection */\nexport interface GestureConfig {\n /** is gesture detection enabled? */\n enabled: boolean,\n}\n/** Possible TensorFlow backends */\nexport type BackendType = ['cpu', 'wasm', 'webgl', 'humangl', 'tensorflow', 'webgpu'];\n\n/** Possible values for `human.warmup` */\nexport type WarmupType = ['' | 'none' | 'face' | 'full' | 'body'];\n\n/**\n * Configuration interface definition for **Human** library\n * Contains all configurable parameters\n * Defaults: [config](https://github.com/vladmandic/human/blob/main/src/config.ts#L262)\n */\nexport interface Config {\n /** Backend used for TFJS operations\n * valid build-in backends are:\n * - Browser: `cpu`, `wasm`, `webgl`, `humangl`, `webgpu`\n * - NodeJS: `cpu`, `wasm`, `tensorflow`\n * default: `humangl` for browser and `tensorflow` for nodejs\n */\n backend: '' | 'cpu' | 'wasm' | 'webgl' | 'humangl' | 'tensorflow' | 'webgpu',\n\n /** Path to *.wasm files if backend is set to `wasm`\n *\n * default: auto-detects to link to CDN `jsdelivr` when running in browser\n */\n wasmPath: string,\n\n /** Force WASM loader to use platform fetch\n *\n * default: auto-detects to link to CDN `jsdelivr` when running in browser\n */\n wasmPlatformFetch: boolean,\n\n /** Print debug statements to console\n *\n * default: `true`\n */\n debug: boolean,\n\n /** Perform model loading and inference concurrently or sequentially\n *\n * default: `true`\n */\n async: boolean,\n\n /** What to use for `human.warmup()`\n * - warmup pre-initializes all models for faster inference but can take significant time on startup\n * - used by `webgl`, `humangl` and `webgpu` backends\n *\n * default: `full`\n */\n warmup: '' | 'none' | 'face' | 'full' | 'body',\n\n /** Base model path (typically starting with file://, http:// or https://) for all models\n * - individual modelPath values are relative to this path\n *\n * default: 
`../models/` for browsers and `file://models/` for nodejs\n */\n modelBasePath: string,\n\n /** Cache models in IndexDB on first sucessfull load\n * default: true if indexdb is available (browsers), false if its not (nodejs)\n */\n cacheModels: boolean,\n\n /** Cache sensitivity\n * - values 0..1 where 0.01 means reset cache if input changed more than 1%\n * - set to 0 to disable caching\n *\n * default: 0.7\n */\n cacheSensitivity: number;\n\n /** Perform immediate garbage collection on deallocated tensors instead of caching them */\n deallocate: boolean;\n\n /** Internal Variable */\n skipAllowed: boolean;\n\n /** Filter config {@link FilterConfig} */\n filter: Partial,\n\n /** Gesture config {@link GestureConfig} */\n gesture: Partial;\n\n /** Face config {@link FaceConfig} */\n face: Partial,\n\n /** Body config {@link BodyConfig} */\n body: Partial,\n\n /** Hand config {@link HandConfig} */\n hand: Partial,\n\n /** Object config {@link ObjectConfig} */\n object: Partial,\n\n /** Segmentation config {@link SegmentationConfig} */\n segmentation: Partial,\n}\n\n/** - [See all default Config values...](https://github.com/vladmandic/human/blob/main/src/config.ts#L262) */\nconst config: Config = {\n backend: '',\n modelBasePath: '',\n cacheModels: true,\n wasmPath: '',\n wasmPlatformFetch: false,\n debug: false,\n async: true,\n warmup: 'full',\n cacheSensitivity: 0.70,\n skipAllowed: false,\n deallocate: false,\n filter: {\n enabled: true,\n equalization: false,\n width: 0,\n height: 0,\n flip: false,\n return: true,\n brightness: 0,\n contrast: 0,\n sharpness: 0,\n blur: 0,\n saturation: 0,\n hue: 0,\n negative: false,\n sepia: false,\n vintage: false,\n kodachrome: false,\n technicolor: false,\n polaroid: false,\n pixelate: 0,\n },\n gesture: {\n enabled: true,\n },\n face: {\n enabled: true,\n detector: {\n modelPath: 'blazeface.json',\n rotation: true,\n maxDetected: 1,\n skipFrames: 99,\n skipTime: 2500,\n minConfidence: 0.2,\n iouThreshold: 0.1,\n mask: false,\n return: false,\n },\n mesh: {\n enabled: true,\n modelPath: 'facemesh.json',\n keepInvalid: false,\n },\n attention: {\n enabled: false,\n modelPath: 'facemesh-attention.json',\n },\n iris: {\n enabled: true,\n modelPath: 'iris.json',\n },\n emotion: {\n enabled: true,\n minConfidence: 0.1,\n skipFrames: 99,\n skipTime: 1500,\n modelPath: 'emotion.json',\n },\n description: {\n enabled: true,\n modelPath: 'faceres.json',\n skipFrames: 99,\n skipTime: 3000,\n minConfidence: 0.1,\n },\n antispoof: {\n enabled: false,\n skipFrames: 99,\n skipTime: 4000,\n modelPath: 'antispoof.json',\n },\n liveness: {\n enabled: false,\n skipFrames: 99,\n skipTime: 4000,\n modelPath: 'liveness.json',\n },\n },\n body: {\n enabled: true,\n modelPath: 'movenet-lightning.json',\n maxDetected: -1,\n minConfidence: 0.3,\n skipFrames: 1,\n skipTime: 200,\n },\n hand: {\n enabled: true,\n rotation: true,\n skipFrames: 99,\n skipTime: 1000,\n minConfidence: 0.50,\n iouThreshold: 0.2,\n maxDetected: -1,\n landmarks: true,\n detector: {\n modelPath: 'handtrack.json',\n },\n skeleton: {\n modelPath: 'handlandmark-full.json',\n },\n },\n object: {\n enabled: false,\n modelPath: 'mb3-centernet.json',\n minConfidence: 0.2,\n iouThreshold: 0.4,\n maxDetected: 10,\n skipFrames: 99,\n skipTime: 2000,\n },\n segmentation: {\n enabled: false,\n modelPath: 'selfie.json',\n blur: 8,\n },\n};\n\nexport { config as defaults };\n", "/*\n Human\n homepage: \n author: '\n*/\n\nvar __create = Object.create;\nvar __defProp = Object.defineProperty;\nvar __getOwnPropDesc = 
Object.getOwnPropertyDescriptor;\nvar __getOwnPropNames = Object.getOwnPropertyNames;\nvar __getProtoOf = Object.getPrototypeOf;\nvar __hasOwnProp = Object.prototype.hasOwnProperty;\nvar __commonJS = (cb, mod4) => function __require() {\n return mod4 || (0, cb[__getOwnPropNames(cb)[0]])((mod4 = { exports: {} }).exports, mod4), mod4.exports;\n};\nvar __export = (target, all5) => {\n for (var name in all5)\n __defProp(target, name, { get: all5[name], enumerable: true });\n};\nvar __copyProps = (to, from, except, desc) => {\n if (from && typeof from === \"object\" || typeof from === \"function\") {\n for (let key of __getOwnPropNames(from))\n if (!__hasOwnProp.call(to, key) && key !== except)\n __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable });\n }\n return to;\n};\nvar __toESM = (mod4, isNodeMode, target) => (target = mod4 != null ? __create(__getProtoOf(mod4)) : {}, __copyProps(\n isNodeMode || !mod4 || !mod4.__esModule ? __defProp(target, \"default\", { value: mod4, enumerable: true }) : target,\n mod4\n));\n\n// node_modules/.pnpm/long@4.0.0/node_modules/long/src/long.js\nvar require_long = __commonJS({\n \"node_modules/.pnpm/long@4.0.0/node_modules/long/src/long.js\"(exports, module) {\n module.exports = Long2;\n var wasm = null;\n try {\n wasm = new WebAssembly.Instance(new WebAssembly.Module(new Uint8Array([\n 0,\n 97,\n 115,\n 109,\n 1,\n 0,\n 0,\n 0,\n 1,\n 13,\n 2,\n 96,\n 0,\n 1,\n 127,\n 96,\n 4,\n 127,\n 127,\n 127,\n 127,\n 1,\n 127,\n 3,\n 7,\n 6,\n 0,\n 1,\n 1,\n 1,\n 1,\n 1,\n 6,\n 6,\n 1,\n 127,\n 1,\n 65,\n 0,\n 11,\n 7,\n 50,\n 6,\n 3,\n 109,\n 117,\n 108,\n 0,\n 1,\n 5,\n 100,\n 105,\n 118,\n 95,\n 115,\n 0,\n 2,\n 5,\n 100,\n 105,\n 118,\n 95,\n 117,\n 0,\n 3,\n 5,\n 114,\n 101,\n 109,\n 95,\n 115,\n 0,\n 4,\n 5,\n 114,\n 101,\n 109,\n 95,\n 117,\n 0,\n 5,\n 8,\n 103,\n 101,\n 116,\n 95,\n 104,\n 105,\n 103,\n 104,\n 0,\n 0,\n 10,\n 191,\n 1,\n 6,\n 4,\n 0,\n 35,\n 0,\n 11,\n 36,\n 1,\n 1,\n 126,\n 32,\n 0,\n 173,\n 32,\n 1,\n 173,\n 66,\n 32,\n 134,\n 132,\n 32,\n 2,\n 173,\n 32,\n 3,\n 173,\n 66,\n 32,\n 134,\n 132,\n 126,\n 34,\n 4,\n 66,\n 32,\n 135,\n 167,\n 36,\n 0,\n 32,\n 4,\n 167,\n 11,\n 36,\n 1,\n 1,\n 126,\n 32,\n 0,\n 173,\n 32,\n 1,\n 173,\n 66,\n 32,\n 134,\n 132,\n 32,\n 2,\n 173,\n 32,\n 3,\n 173,\n 66,\n 32,\n 134,\n 132,\n 127,\n 34,\n 4,\n 66,\n 32,\n 135,\n 167,\n 36,\n 0,\n 32,\n 4,\n 167,\n 11,\n 36,\n 1,\n 1,\n 126,\n 32,\n 0,\n 173,\n 32,\n 1,\n 173,\n 66,\n 32,\n 134,\n 132,\n 32,\n 2,\n 173,\n 32,\n 3,\n 173,\n 66,\n 32,\n 134,\n 132,\n 128,\n 34,\n 4,\n 66,\n 32,\n 135,\n 167,\n 36,\n 0,\n 32,\n 4,\n 167,\n 11,\n 36,\n 1,\n 1,\n 126,\n 32,\n 0,\n 173,\n 32,\n 1,\n 173,\n 66,\n 32,\n 134,\n 132,\n 32,\n 2,\n 173,\n 32,\n 3,\n 173,\n 66,\n 32,\n 134,\n 132,\n 129,\n 34,\n 4,\n 66,\n 32,\n 135,\n 167,\n 36,\n 0,\n 32,\n 4,\n 167,\n 11,\n 36,\n 1,\n 1,\n 126,\n 32,\n 0,\n 173,\n 32,\n 1,\n 173,\n 66,\n 32,\n 134,\n 132,\n 32,\n 2,\n 173,\n 32,\n 3,\n 173,\n 66,\n 32,\n 134,\n 132,\n 130,\n 34,\n 4,\n 66,\n 32,\n 135,\n 167,\n 36,\n 0,\n 32,\n 4,\n 167,\n 11\n ])), {}).exports;\n } catch (e2) {\n }\n function Long2(low, high, unsigned) {\n this.low = low | 0;\n this.high = high | 0;\n this.unsigned = !!unsigned;\n }\n Long2.prototype.__isLong__;\n Object.defineProperty(Long2.prototype, \"__isLong__\", { value: true });\n function isLong(obj) {\n return (obj && obj[\"__isLong__\"]) === true;\n }\n Long2.isLong = isLong;\n var INT_CACHE = {};\n var UINT_CACHE = {};\n function 
fromInt(value, unsigned) {\n var obj, cachedObj, cache;\n if (unsigned) {\n value >>>= 0;\n if (cache = 0 <= value && value < 256) {\n cachedObj = UINT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, (value | 0) < 0 ? -1 : 0, true);\n if (cache)\n UINT_CACHE[value] = obj;\n return obj;\n } else {\n value |= 0;\n if (cache = -128 <= value && value < 128) {\n cachedObj = INT_CACHE[value];\n if (cachedObj)\n return cachedObj;\n }\n obj = fromBits(value, value < 0 ? -1 : 0, false);\n if (cache)\n INT_CACHE[value] = obj;\n return obj;\n }\n }\n Long2.fromInt = fromInt;\n function fromNumber(value, unsigned) {\n if (isNaN(value))\n return unsigned ? UZERO : ZERO;\n if (unsigned) {\n if (value < 0)\n return UZERO;\n if (value >= TWO_PWR_64_DBL)\n return MAX_UNSIGNED_VALUE;\n } else {\n if (value <= -TWO_PWR_63_DBL)\n return MIN_VALUE;\n if (value + 1 >= TWO_PWR_63_DBL)\n return MAX_VALUE;\n }\n if (value < 0)\n return fromNumber(-value, unsigned).neg();\n return fromBits(value % TWO_PWR_32_DBL | 0, value / TWO_PWR_32_DBL | 0, unsigned);\n }\n Long2.fromNumber = fromNumber;\n function fromBits(lowBits, highBits, unsigned) {\n return new Long2(lowBits, highBits, unsigned);\n }\n Long2.fromBits = fromBits;\n var pow_dbl = Math.pow;\n function fromString(str, unsigned, radix) {\n if (str.length === 0)\n throw Error(\"empty string\");\n if (str === \"NaN\" || str === \"Infinity\" || str === \"+Infinity\" || str === \"-Infinity\")\n return ZERO;\n if (typeof unsigned === \"number\") {\n radix = unsigned, unsigned = false;\n } else {\n unsigned = !!unsigned;\n }\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError(\"radix\");\n var p2;\n if ((p2 = str.indexOf(\"-\")) > 0)\n throw Error(\"interior hyphen\");\n else if (p2 === 0) {\n return fromString(str.substring(1), unsigned, radix).neg();\n }\n var radixToPower = fromNumber(pow_dbl(radix, 8));\n var result = ZERO;\n for (var i2 = 0; i2 < str.length; i2 += 8) {\n var size = Math.min(8, str.length - i2), value = parseInt(str.substring(i2, i2 + size), radix);\n if (size < 8) {\n var power = fromNumber(pow_dbl(radix, size));\n result = result.mul(power).add(fromNumber(value));\n } else {\n result = result.mul(radixToPower);\n result = result.add(fromNumber(value));\n }\n }\n result.unsigned = unsigned;\n return result;\n }\n Long2.fromString = fromString;\n function fromValue(val, unsigned) {\n if (typeof val === \"number\")\n return fromNumber(val, unsigned);\n if (typeof val === \"string\")\n return fromString(val, unsigned);\n return fromBits(val.low, val.high, typeof unsigned === \"boolean\" ? 
unsigned : val.unsigned);\n }\n Long2.fromValue = fromValue;\n var TWO_PWR_16_DBL = 1 << 16;\n var TWO_PWR_24_DBL = 1 << 24;\n var TWO_PWR_32_DBL = TWO_PWR_16_DBL * TWO_PWR_16_DBL;\n var TWO_PWR_64_DBL = TWO_PWR_32_DBL * TWO_PWR_32_DBL;\n var TWO_PWR_63_DBL = TWO_PWR_64_DBL / 2;\n var TWO_PWR_24 = fromInt(TWO_PWR_24_DBL);\n var ZERO = fromInt(0);\n Long2.ZERO = ZERO;\n var UZERO = fromInt(0, true);\n Long2.UZERO = UZERO;\n var ONE = fromInt(1);\n Long2.ONE = ONE;\n var UONE = fromInt(1, true);\n Long2.UONE = UONE;\n var NEG_ONE = fromInt(-1);\n Long2.NEG_ONE = NEG_ONE;\n var MAX_VALUE = fromBits(4294967295 | 0, 2147483647 | 0, false);\n Long2.MAX_VALUE = MAX_VALUE;\n var MAX_UNSIGNED_VALUE = fromBits(4294967295 | 0, 4294967295 | 0, true);\n Long2.MAX_UNSIGNED_VALUE = MAX_UNSIGNED_VALUE;\n var MIN_VALUE = fromBits(0, 2147483648 | 0, false);\n Long2.MIN_VALUE = MIN_VALUE;\n var LongPrototype = Long2.prototype;\n LongPrototype.toInt = function toInt() {\n return this.unsigned ? this.low >>> 0 : this.low;\n };\n LongPrototype.toNumber = function toNumber() {\n if (this.unsigned)\n return (this.high >>> 0) * TWO_PWR_32_DBL + (this.low >>> 0);\n return this.high * TWO_PWR_32_DBL + (this.low >>> 0);\n };\n LongPrototype.toString = function toString(radix) {\n radix = radix || 10;\n if (radix < 2 || 36 < radix)\n throw RangeError(\"radix\");\n if (this.isZero())\n return \"0\";\n if (this.isNegative()) {\n if (this.eq(MIN_VALUE)) {\n var radixLong = fromNumber(radix), div3 = this.div(radixLong), rem1 = div3.mul(radixLong).sub(this);\n return div3.toString(radix) + rem1.toInt().toString(radix);\n } else\n return \"-\" + this.neg().toString(radix);\n }\n var radixToPower = fromNumber(pow_dbl(radix, 6), this.unsigned), rem = this;\n var result = \"\";\n while (true) {\n var remDiv = rem.div(radixToPower), intval = rem.sub(remDiv.mul(radixToPower)).toInt() >>> 0, digits = intval.toString(radix);\n rem = remDiv;\n if (rem.isZero())\n return digits + result;\n else {\n while (digits.length < 6)\n digits = \"0\" + digits;\n result = \"\" + digits + result;\n }\n }\n };\n LongPrototype.getHighBits = function getHighBits() {\n return this.high;\n };\n LongPrototype.getHighBitsUnsigned = function getHighBitsUnsigned() {\n return this.high >>> 0;\n };\n LongPrototype.getLowBits = function getLowBits() {\n return this.low;\n };\n LongPrototype.getLowBitsUnsigned = function getLowBitsUnsigned() {\n return this.low >>> 0;\n };\n LongPrototype.getNumBitsAbs = function getNumBitsAbs() {\n if (this.isNegative())\n return this.eq(MIN_VALUE) ? 64 : this.neg().getNumBitsAbs();\n var val = this.high != 0 ? this.high : this.low;\n for (var bit = 31; bit > 0; bit--)\n if ((val & 1 << bit) != 0)\n break;\n return this.high != 0 ? 
bit + 33 : bit + 1;\n };\n LongPrototype.isZero = function isZero() {\n return this.high === 0 && this.low === 0;\n };\n LongPrototype.eqz = LongPrototype.isZero;\n LongPrototype.isNegative = function isNegative() {\n return !this.unsigned && this.high < 0;\n };\n LongPrototype.isPositive = function isPositive() {\n return this.unsigned || this.high >= 0;\n };\n LongPrototype.isOdd = function isOdd() {\n return (this.low & 1) === 1;\n };\n LongPrototype.isEven = function isEven2() {\n return (this.low & 1) === 0;\n };\n LongPrototype.equals = function equals(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.unsigned !== other.unsigned && this.high >>> 31 === 1 && other.high >>> 31 === 1)\n return false;\n return this.high === other.high && this.low === other.low;\n };\n LongPrototype.eq = LongPrototype.equals;\n LongPrototype.notEquals = function notEquals(other) {\n return !this.eq(other);\n };\n LongPrototype.neq = LongPrototype.notEquals;\n LongPrototype.ne = LongPrototype.notEquals;\n LongPrototype.lessThan = function lessThan(other) {\n return this.comp(other) < 0;\n };\n LongPrototype.lt = LongPrototype.lessThan;\n LongPrototype.lessThanOrEqual = function lessThanOrEqual(other) {\n return this.comp(other) <= 0;\n };\n LongPrototype.lte = LongPrototype.lessThanOrEqual;\n LongPrototype.le = LongPrototype.lessThanOrEqual;\n LongPrototype.greaterThan = function greaterThan(other) {\n return this.comp(other) > 0;\n };\n LongPrototype.gt = LongPrototype.greaterThan;\n LongPrototype.greaterThanOrEqual = function greaterThanOrEqual(other) {\n return this.comp(other) >= 0;\n };\n LongPrototype.gte = LongPrototype.greaterThanOrEqual;\n LongPrototype.ge = LongPrototype.greaterThanOrEqual;\n LongPrototype.compare = function compare(other) {\n if (!isLong(other))\n other = fromValue(other);\n if (this.eq(other))\n return 0;\n var thisNeg = this.isNegative(), otherNeg = other.isNegative();\n if (thisNeg && !otherNeg)\n return -1;\n if (!thisNeg && otherNeg)\n return 1;\n if (!this.unsigned)\n return this.sub(other).isNegative() ? -1 : 1;\n return other.high >>> 0 > this.high >>> 0 || other.high === this.high && other.low >>> 0 > this.low >>> 0 ? 
-1 : 1;\n };\n LongPrototype.comp = LongPrototype.compare;\n LongPrototype.negate = function negate() {\n if (!this.unsigned && this.eq(MIN_VALUE))\n return MIN_VALUE;\n return this.not().add(ONE);\n };\n LongPrototype.neg = LongPrototype.negate;\n LongPrototype.add = function add5(addend) {\n if (!isLong(addend))\n addend = fromValue(addend);\n var a48 = this.high >>> 16;\n var a32 = this.high & 65535;\n var a16 = this.low >>> 16;\n var a00 = this.low & 65535;\n var b48 = addend.high >>> 16;\n var b32 = addend.high & 65535;\n var b16 = addend.low >>> 16;\n var b00 = addend.low & 65535;\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 + b00;\n c16 += c00 >>> 16;\n c00 &= 65535;\n c16 += a16 + b16;\n c32 += c16 >>> 16;\n c16 &= 65535;\n c32 += a32 + b32;\n c48 += c32 >>> 16;\n c32 &= 65535;\n c48 += a48 + b48;\n c48 &= 65535;\n return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned);\n };\n LongPrototype.subtract = function subtract(subtrahend) {\n if (!isLong(subtrahend))\n subtrahend = fromValue(subtrahend);\n return this.add(subtrahend.neg());\n };\n LongPrototype.sub = LongPrototype.subtract;\n LongPrototype.multiply = function multiply4(multiplier) {\n if (this.isZero())\n return ZERO;\n if (!isLong(multiplier))\n multiplier = fromValue(multiplier);\n if (wasm) {\n var low = wasm.mul(\n this.low,\n this.high,\n multiplier.low,\n multiplier.high\n );\n return fromBits(low, wasm.get_high(), this.unsigned);\n }\n if (multiplier.isZero())\n return ZERO;\n if (this.eq(MIN_VALUE))\n return multiplier.isOdd() ? MIN_VALUE : ZERO;\n if (multiplier.eq(MIN_VALUE))\n return this.isOdd() ? MIN_VALUE : ZERO;\n if (this.isNegative()) {\n if (multiplier.isNegative())\n return this.neg().mul(multiplier.neg());\n else\n return this.neg().mul(multiplier).neg();\n } else if (multiplier.isNegative())\n return this.mul(multiplier.neg()).neg();\n if (this.lt(TWO_PWR_24) && multiplier.lt(TWO_PWR_24))\n return fromNumber(this.toNumber() * multiplier.toNumber(), this.unsigned);\n var a48 = this.high >>> 16;\n var a32 = this.high & 65535;\n var a16 = this.low >>> 16;\n var a00 = this.low & 65535;\n var b48 = multiplier.high >>> 16;\n var b32 = multiplier.high & 65535;\n var b16 = multiplier.low >>> 16;\n var b00 = multiplier.low & 65535;\n var c48 = 0, c32 = 0, c16 = 0, c00 = 0;\n c00 += a00 * b00;\n c16 += c00 >>> 16;\n c00 &= 65535;\n c16 += a16 * b00;\n c32 += c16 >>> 16;\n c16 &= 65535;\n c16 += a00 * b16;\n c32 += c16 >>> 16;\n c16 &= 65535;\n c32 += a32 * b00;\n c48 += c32 >>> 16;\n c32 &= 65535;\n c32 += a16 * b16;\n c48 += c32 >>> 16;\n c32 &= 65535;\n c32 += a00 * b32;\n c48 += c32 >>> 16;\n c32 &= 65535;\n c48 += a48 * b00 + a32 * b16 + a16 * b32 + a00 * b48;\n c48 &= 65535;\n return fromBits(c16 << 16 | c00, c48 << 16 | c32, this.unsigned);\n };\n LongPrototype.mul = LongPrototype.multiply;\n LongPrototype.divide = function divide(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n if (divisor.isZero())\n throw Error(\"division by zero\");\n if (wasm) {\n if (!this.unsigned && this.high === -2147483648 && divisor.low === -1 && divisor.high === -1) {\n return this;\n }\n var low = (this.unsigned ? wasm.div_u : wasm.div_s)(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm.get_high(), this.unsigned);\n }\n if (this.isZero())\n return this.unsigned ? 
UZERO : ZERO;\n var approx, rem, res;\n if (!this.unsigned) {\n if (this.eq(MIN_VALUE)) {\n if (divisor.eq(ONE) || divisor.eq(NEG_ONE))\n return MIN_VALUE;\n else if (divisor.eq(MIN_VALUE))\n return ONE;\n else {\n var halfThis = this.shr(1);\n approx = halfThis.div(divisor).shl(1);\n if (approx.eq(ZERO)) {\n return divisor.isNegative() ? ONE : NEG_ONE;\n } else {\n rem = this.sub(divisor.mul(approx));\n res = approx.add(rem.div(divisor));\n return res;\n }\n }\n } else if (divisor.eq(MIN_VALUE))\n return this.unsigned ? UZERO : ZERO;\n if (this.isNegative()) {\n if (divisor.isNegative())\n return this.neg().div(divisor.neg());\n return this.neg().div(divisor).neg();\n } else if (divisor.isNegative())\n return this.div(divisor.neg()).neg();\n res = ZERO;\n } else {\n if (!divisor.unsigned)\n divisor = divisor.toUnsigned();\n if (divisor.gt(this))\n return UZERO;\n if (divisor.gt(this.shru(1)))\n return UONE;\n res = UZERO;\n }\n rem = this;\n while (rem.gte(divisor)) {\n approx = Math.max(1, Math.floor(rem.toNumber() / divisor.toNumber()));\n var log22 = Math.ceil(Math.log(approx) / Math.LN2), delta = log22 <= 48 ? 1 : pow_dbl(2, log22 - 48), approxRes = fromNumber(approx), approxRem = approxRes.mul(divisor);\n while (approxRem.isNegative() || approxRem.gt(rem)) {\n approx -= delta;\n approxRes = fromNumber(approx, this.unsigned);\n approxRem = approxRes.mul(divisor);\n }\n if (approxRes.isZero())\n approxRes = ONE;\n res = res.add(approxRes);\n rem = rem.sub(approxRem);\n }\n return res;\n };\n LongPrototype.div = LongPrototype.divide;\n LongPrototype.modulo = function modulo(divisor) {\n if (!isLong(divisor))\n divisor = fromValue(divisor);\n if (wasm) {\n var low = (this.unsigned ? wasm.rem_u : wasm.rem_s)(\n this.low,\n this.high,\n divisor.low,\n divisor.high\n );\n return fromBits(low, wasm.get_high(), this.unsigned);\n }\n return this.sub(this.div(divisor).mul(divisor));\n };\n LongPrototype.mod = LongPrototype.modulo;\n LongPrototype.rem = LongPrototype.modulo;\n LongPrototype.not = function not() {\n return fromBits(~this.low, ~this.high, this.unsigned);\n };\n LongPrototype.and = function and(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low & other.low, this.high & other.high, this.unsigned);\n };\n LongPrototype.or = function or(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low | other.low, this.high | other.high, this.unsigned);\n };\n LongPrototype.xor = function xor(other) {\n if (!isLong(other))\n other = fromValue(other);\n return fromBits(this.low ^ other.low, this.high ^ other.high, this.unsigned);\n };\n LongPrototype.shiftLeft = function shiftLeft(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits(this.low << numBits, this.high << numBits | this.low >>> 32 - numBits, this.unsigned);\n else\n return fromBits(0, this.low << numBits - 32, this.unsigned);\n };\n LongPrototype.shl = LongPrototype.shiftLeft;\n LongPrototype.shiftRight = function shiftRight(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n if ((numBits &= 63) === 0)\n return this;\n else if (numBits < 32)\n return fromBits(this.low >>> numBits | this.high << 32 - numBits, this.high >> numBits, this.unsigned);\n else\n return fromBits(this.high >> numBits - 32, this.high >= 0 ? 
0 : -1, this.unsigned);\n };\n LongPrototype.shr = LongPrototype.shiftRight;\n LongPrototype.shiftRightUnsigned = function shiftRightUnsigned(numBits) {\n if (isLong(numBits))\n numBits = numBits.toInt();\n numBits &= 63;\n if (numBits === 0)\n return this;\n else {\n var high = this.high;\n if (numBits < 32) {\n var low = this.low;\n return fromBits(low >>> numBits | high << 32 - numBits, high >>> numBits, this.unsigned);\n } else if (numBits === 32)\n return fromBits(high, 0, this.unsigned);\n else\n return fromBits(high >>> numBits - 32, 0, this.unsigned);\n }\n };\n LongPrototype.shru = LongPrototype.shiftRightUnsigned;\n LongPrototype.shr_u = LongPrototype.shiftRightUnsigned;\n LongPrototype.toSigned = function toSigned() {\n if (!this.unsigned)\n return this;\n return fromBits(this.low, this.high, false);\n };\n LongPrototype.toUnsigned = function toUnsigned() {\n if (this.unsigned)\n return this;\n return fromBits(this.low, this.high, true);\n };\n LongPrototype.toBytes = function toBytes(le) {\n return le ? this.toBytesLE() : this.toBytesBE();\n };\n LongPrototype.toBytesLE = function toBytesLE() {\n var hi = this.high, lo = this.low;\n return [\n lo & 255,\n lo >>> 8 & 255,\n lo >>> 16 & 255,\n lo >>> 24,\n hi & 255,\n hi >>> 8 & 255,\n hi >>> 16 & 255,\n hi >>> 24\n ];\n };\n LongPrototype.toBytesBE = function toBytesBE() {\n var hi = this.high, lo = this.low;\n return [\n hi >>> 24,\n hi >>> 16 & 255,\n hi >>> 8 & 255,\n hi & 255,\n lo >>> 24,\n lo >>> 16 & 255,\n lo >>> 8 & 255,\n lo & 255\n ];\n };\n Long2.fromBytes = function fromBytes(bytes, unsigned, le) {\n return le ? Long2.fromBytesLE(bytes, unsigned) : Long2.fromBytesBE(bytes, unsigned);\n };\n Long2.fromBytesLE = function fromBytesLE(bytes, unsigned) {\n return new Long2(\n bytes[0] | bytes[1] << 8 | bytes[2] << 16 | bytes[3] << 24,\n bytes[4] | bytes[5] << 8 | bytes[6] << 16 | bytes[7] << 24,\n unsigned\n );\n };\n Long2.fromBytesBE = function fromBytesBE(bytes, unsigned) {\n return new Long2(\n bytes[4] << 24 | bytes[5] << 16 | bytes[6] << 8 | bytes[7],\n bytes[0] << 24 | bytes[1] << 16 | bytes[2] << 8 | bytes[3],\n unsigned\n );\n };\n }\n});\n\n// (disabled):node_modules/.pnpm/node-fetch@2.6.7/node_modules/node-fetch/browser.js\nvar require_browser = __commonJS({\n \"(disabled):node_modules/.pnpm/node-fetch@2.6.7/node_modules/node-fetch/browser.js\"() {\n }\n});\n\n// (disabled):util\nvar require_util = __commonJS({\n \"(disabled):util\"() {\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/alea.js\nvar require_alea = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/alea.js\"(exports, module) {\n (function(global2, module2, define2) {\n function Alea(seed) {\n var me = this, mash = Mash();\n me.next = function() {\n var t2 = 2091639 * me.s0 + me.c * 23283064365386963e-26;\n me.s0 = me.s1;\n me.s1 = me.s2;\n return me.s2 = t2 - (me.c = t2 | 0);\n };\n me.c = 1;\n me.s0 = mash(\" \");\n me.s1 = mash(\" \");\n me.s2 = mash(\" \");\n me.s0 -= mash(seed);\n if (me.s0 < 0) {\n me.s0 += 1;\n }\n me.s1 -= mash(seed);\n if (me.s1 < 0) {\n me.s1 += 1;\n }\n me.s2 -= mash(seed);\n if (me.s2 < 0) {\n me.s2 += 1;\n }\n mash = null;\n }\n function copy(f, t2) {\n t2.c = f.c;\n t2.s0 = f.s0;\n t2.s1 = f.s1;\n t2.s2 = f.s2;\n return t2;\n }\n function impl(seed, opts) {\n var xg = new Alea(seed), state = opts && opts.state, prng = xg.next;\n prng.int32 = function() {\n return xg.next() * 4294967296 | 0;\n };\n prng.double = function() {\n return prng() + (prng() * 
2097152 | 0) * 11102230246251565e-32;\n };\n prng.quick = prng;\n if (state) {\n if (typeof state == \"object\")\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n function Mash() {\n var n2 = 4022871197;\n var mash = function(data) {\n data = String(data);\n for (var i2 = 0; i2 < data.length; i2++) {\n n2 += data.charCodeAt(i2);\n var h = 0.02519603282416938 * n2;\n n2 = h >>> 0;\n h -= n2;\n h *= n2;\n n2 = h >>> 0;\n h -= n2;\n n2 += h * 4294967296;\n }\n return (n2 >>> 0) * 23283064365386963e-26;\n };\n return mash;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.alea = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xor128.js\nvar require_xor128 = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xor128.js\"(exports, module) {\n (function(global2, module2, define2) {\n function XorGen(seed) {\n var me = this, strseed = \"\";\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.next = function() {\n var t2 = me.x ^ me.x << 11;\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n return me.w ^= me.w >>> 19 ^ t2 ^ t2 >>> 8;\n };\n if (seed === (seed | 0)) {\n me.x = seed;\n } else {\n strseed += seed;\n }\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n }\n function copy(f, t2) {\n t2.x = f.x;\n t2.y = f.y;\n t2.z = f.z;\n t2.w = f.w;\n return t2;\n }\n function impl(seed, opts) {\n var xg = new XorGen(seed), state = opts && opts.state, prng = function() {\n return (xg.next() >>> 0) / 4294967296;\n };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11, bot = (xg.next() >>> 0) / 4294967296, result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof state == \"object\")\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.xor128 = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xorwow.js\nvar require_xorwow = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xorwow.js\"(exports, module) {\n (function(global2, module2, define2) {\n function XorGen(seed) {\n var me = this, strseed = \"\";\n me.next = function() {\n var t2 = me.x ^ me.x >>> 2;\n me.x = me.y;\n me.y = me.z;\n me.z = me.w;\n me.w = me.v;\n return (me.d = me.d + 362437 | 0) + (me.v = me.v ^ me.v << 4 ^ (t2 ^ t2 << 1)) | 0;\n };\n me.x = 0;\n me.y = 0;\n me.z = 0;\n me.w = 0;\n me.v = 0;\n if (seed === (seed | 0)) {\n me.x = seed;\n } else {\n strseed += seed;\n }\n for (var k = 0; k < strseed.length + 64; k++) {\n me.x ^= strseed.charCodeAt(k) | 0;\n if (k == strseed.length) {\n me.d = me.x << 10 ^ me.x >>> 4;\n }\n me.next();\n }\n }\n function copy(f, t2) {\n t2.x = f.x;\n t2.y = f.y;\n t2.z = f.z;\n t2.w = f.w;\n t2.v = f.v;\n t2.d = f.d;\n return t2;\n }\n function impl(seed, opts) {\n var xg = new XorGen(seed), state = opts && opts.state, prng = 
function() {\n return (xg.next() >>> 0) / 4294967296;\n };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11, bot = (xg.next() >>> 0) / 4294967296, result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof state == \"object\")\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.xorwow = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xorshift7.js\nvar require_xorshift7 = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xorshift7.js\"(exports, module) {\n (function(global2, module2, define2) {\n function XorGen(seed) {\n var me = this;\n me.next = function() {\n var X = me.x, i2 = me.i, t2, v, w;\n t2 = X[i2];\n t2 ^= t2 >>> 7;\n v = t2 ^ t2 << 24;\n t2 = X[i2 + 1 & 7];\n v ^= t2 ^ t2 >>> 10;\n t2 = X[i2 + 3 & 7];\n v ^= t2 ^ t2 >>> 3;\n t2 = X[i2 + 4 & 7];\n v ^= t2 ^ t2 << 7;\n t2 = X[i2 + 7 & 7];\n t2 = t2 ^ t2 << 13;\n v ^= t2 ^ t2 << 9;\n X[i2] = v;\n me.i = i2 + 1 & 7;\n return v;\n };\n function init2(me2, seed2) {\n var j, w, X = [];\n if (seed2 === (seed2 | 0)) {\n w = X[0] = seed2;\n } else {\n seed2 = \"\" + seed2;\n for (j = 0; j < seed2.length; ++j) {\n X[j & 7] = X[j & 7] << 15 ^ seed2.charCodeAt(j) + X[j + 1 & 7] << 13;\n }\n }\n while (X.length < 8)\n X.push(0);\n for (j = 0; j < 8 && X[j] === 0; ++j)\n ;\n if (j == 8)\n w = X[7] = -1;\n else\n w = X[j];\n me2.x = X;\n me2.i = 0;\n for (j = 256; j > 0; --j) {\n me2.next();\n }\n }\n init2(me, seed);\n }\n function copy(f, t2) {\n t2.x = f.x.slice();\n t2.i = f.i;\n return t2;\n }\n function impl(seed, opts) {\n if (seed == null)\n seed = +new Date();\n var xg = new XorGen(seed), state = opts && opts.state, prng = function() {\n return (xg.next() >>> 0) / 4294967296;\n };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11, bot = (xg.next() >>> 0) / 4294967296, result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.x)\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.xorshift7 = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xor4096.js\nvar require_xor4096 = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/xor4096.js\"(exports, module) {\n (function(global2, module2, define2) {\n function XorGen(seed) {\n var me = this;\n me.next = function() {\n var w = me.w, X = me.X, i2 = me.i, t2, v;\n me.w = w = w + 1640531527 | 0;\n v = X[i2 + 34 & 127];\n t2 = X[i2 = i2 + 1 & 127];\n v ^= v << 13;\n t2 ^= t2 << 17;\n v ^= v >>> 15;\n t2 ^= t2 >>> 12;\n v = X[i2] = v ^ t2;\n me.i = i2;\n return v + (w ^ w >>> 16) | 0;\n };\n function init2(me2, seed2) {\n var t2, v, i2, j, w, X = [], limit = 128;\n if (seed2 === (seed2 | 0)) {\n v = seed2;\n seed2 = 
null;\n } else {\n seed2 = seed2 + \"\\0\";\n v = 0;\n limit = Math.max(limit, seed2.length);\n }\n for (i2 = 0, j = -32; j < limit; ++j) {\n if (seed2)\n v ^= seed2.charCodeAt((j + 32) % seed2.length);\n if (j === 0)\n w = v;\n v ^= v << 10;\n v ^= v >>> 15;\n v ^= v << 4;\n v ^= v >>> 13;\n if (j >= 0) {\n w = w + 1640531527 | 0;\n t2 = X[j & 127] ^= v + w;\n i2 = 0 == t2 ? i2 + 1 : 0;\n }\n }\n if (i2 >= 128) {\n X[(seed2 && seed2.length || 0) & 127] = -1;\n }\n i2 = 127;\n for (j = 4 * 128; j > 0; --j) {\n v = X[i2 + 34 & 127];\n t2 = X[i2 = i2 + 1 & 127];\n v ^= v << 13;\n t2 ^= t2 << 17;\n v ^= v >>> 15;\n t2 ^= t2 >>> 12;\n X[i2] = v ^ t2;\n }\n me2.w = w;\n me2.X = X;\n me2.i = i2;\n }\n init2(me, seed);\n }\n function copy(f, t2) {\n t2.i = f.i;\n t2.w = f.w;\n t2.X = f.X.slice();\n return t2;\n }\n ;\n function impl(seed, opts) {\n if (seed == null)\n seed = +new Date();\n var xg = new XorGen(seed), state = opts && opts.state, prng = function() {\n return (xg.next() >>> 0) / 4294967296;\n };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11, bot = (xg.next() >>> 0) / 4294967296, result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (state.X)\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.xor4096 = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/tychei.js\nvar require_tychei = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/lib/tychei.js\"(exports, module) {\n (function(global2, module2, define2) {\n function XorGen(seed) {\n var me = this, strseed = \"\";\n me.next = function() {\n var b = me.b, c = me.c, d = me.d, a = me.a;\n b = b << 25 ^ b >>> 7 ^ c;\n c = c - d | 0;\n d = d << 24 ^ d >>> 8 ^ a;\n a = a - b | 0;\n me.b = b = b << 20 ^ b >>> 12 ^ c;\n me.c = c = c - d | 0;\n me.d = d << 16 ^ c >>> 16 ^ a;\n return me.a = a - b | 0;\n };\n me.a = 0;\n me.b = 0;\n me.c = 2654435769 | 0;\n me.d = 1367130551;\n if (seed === Math.floor(seed)) {\n me.a = seed / 4294967296 | 0;\n me.b = seed | 0;\n } else {\n strseed += seed;\n }\n for (var k = 0; k < strseed.length + 20; k++) {\n me.b ^= strseed.charCodeAt(k) | 0;\n me.next();\n }\n }\n function copy(f, t2) {\n t2.a = f.a;\n t2.b = f.b;\n t2.c = f.c;\n t2.d = f.d;\n return t2;\n }\n ;\n function impl(seed, opts) {\n var xg = new XorGen(seed), state = opts && opts.state, prng = function() {\n return (xg.next() >>> 0) / 4294967296;\n };\n prng.double = function() {\n do {\n var top = xg.next() >>> 11, bot = (xg.next() >>> 0) / 4294967296, result = (top + bot) / (1 << 21);\n } while (result === 0);\n return result;\n };\n prng.int32 = xg.next;\n prng.quick = prng;\n if (state) {\n if (typeof state == \"object\")\n copy(state, xg);\n prng.state = function() {\n return copy(xg, {});\n };\n }\n return prng;\n }\n if (module2 && module2.exports) {\n module2.exports = impl;\n } else if (define2 && define2.amd) {\n define2(function() {\n return impl;\n });\n } else {\n this.tychei = impl;\n }\n })(\n exports,\n typeof module == \"object\" && module,\n typeof define == \"function\" && define\n );\n }\n});\n\n// (disabled):crypto\nvar require_crypto = 
__commonJS({\n \"(disabled):crypto\"() {\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/seedrandom.js\nvar require_seedrandom = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/seedrandom.js\"(exports, module) {\n (function(global2, pool3, math) {\n var width = 256, chunks = 6, digits = 52, rngname = \"random\", startdenom = math.pow(width, chunks), significance = math.pow(2, digits), overflow = significance * 2, mask = width - 1, nodecrypto;\n function seedrandom5(seed, options, callback) {\n var key = [];\n options = options == true ? { entropy: true } : options || {};\n var shortseed = mixkey(flatten4(\n options.entropy ? [seed, tostring(pool3)] : seed == null ? autoseed() : seed,\n 3\n ), key);\n var arc4 = new ARC4(key);\n var prng = function() {\n var n2 = arc4.g(chunks), d = startdenom, x = 0;\n while (n2 < significance) {\n n2 = (n2 + x) * width;\n d *= width;\n x = arc4.g(1);\n }\n while (n2 >= overflow) {\n n2 /= 2;\n d /= 2;\n x >>>= 1;\n }\n return (n2 + x) / d;\n };\n prng.int32 = function() {\n return arc4.g(4) | 0;\n };\n prng.quick = function() {\n return arc4.g(4) / 4294967296;\n };\n prng.double = prng;\n mixkey(tostring(arc4.S), pool3);\n return (options.pass || callback || function(prng2, seed2, is_math_call, state) {\n if (state) {\n if (state.S) {\n copy(state, arc4);\n }\n prng2.state = function() {\n return copy(arc4, {});\n };\n }\n if (is_math_call) {\n math[rngname] = prng2;\n return seed2;\n } else\n return prng2;\n })(\n prng,\n shortseed,\n \"global\" in options ? options.global : this == math,\n options.state\n );\n }\n function ARC4(key) {\n var t2, keylen = key.length, me = this, i2 = 0, j = me.i = me.j = 0, s2 = me.S = [];\n if (!keylen) {\n key = [keylen++];\n }\n while (i2 < width) {\n s2[i2] = i2++;\n }\n for (i2 = 0; i2 < width; i2++) {\n s2[i2] = s2[j = mask & j + key[i2 % keylen] + (t2 = s2[i2])];\n s2[j] = t2;\n }\n (me.g = function(count2) {\n var t3, r2 = 0, i3 = me.i, j2 = me.j, s3 = me.S;\n while (count2--) {\n t3 = s3[i3 = mask & i3 + 1];\n r2 = r2 * width + s3[mask & (s3[i3] = s3[j2 = mask & j2 + t3]) + (s3[j2] = t3)];\n }\n me.i = i3;\n me.j = j2;\n return r2;\n })(width);\n }\n function copy(f, t2) {\n t2.i = f.i;\n t2.j = f.j;\n t2.S = f.S.slice();\n return t2;\n }\n ;\n function flatten4(obj, depth) {\n var result = [], typ = typeof obj, prop;\n if (depth && typ == \"object\") {\n for (prop in obj) {\n try {\n result.push(flatten4(obj[prop], depth - 1));\n } catch (e2) {\n }\n }\n }\n return result.length ? result : typ == \"string\" ? 
obj : obj + \"\\0\";\n }\n function mixkey(seed, key) {\n var stringseed = seed + \"\", smear, j = 0;\n while (j < stringseed.length) {\n key[mask & j] = mask & (smear ^= key[mask & j] * 19) + stringseed.charCodeAt(j++);\n }\n return tostring(key);\n }\n function autoseed() {\n try {\n var out;\n if (nodecrypto && (out = nodecrypto.randomBytes)) {\n out = out(width);\n } else {\n out = new Uint8Array(width);\n (global2.crypto || global2.msCrypto).getRandomValues(out);\n }\n return tostring(out);\n } catch (e2) {\n var browser = global2.navigator, plugins = browser && browser.plugins;\n return [+new Date(), global2, plugins, global2.screen, tostring(pool3)];\n }\n }\n function tostring(a) {\n return String.fromCharCode.apply(0, a);\n }\n mixkey(math.random(), pool3);\n if (typeof module == \"object\" && module.exports) {\n module.exports = seedrandom5;\n try {\n nodecrypto = require_crypto();\n } catch (ex) {\n }\n } else if (typeof define == \"function\" && define.amd) {\n define(function() {\n return seedrandom5;\n });\n } else {\n math[\"seed\" + rngname] = seedrandom5;\n }\n })(\n typeof self !== \"undefined\" ? self : exports,\n [],\n Math\n );\n }\n});\n\n// node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/index.js\nvar require_seedrandom2 = __commonJS({\n \"node_modules/.pnpm/seedrandom@3.0.5/node_modules/seedrandom/index.js\"(exports, module) {\n var alea5 = require_alea();\n var xor128 = require_xor128();\n var xorwow = require_xorwow();\n var xorshift7 = require_xorshift7();\n var xor4096 = require_xor4096();\n var tychei = require_tychei();\n var sr = require_seedrandom();\n sr.alea = alea5;\n sr.xor128 = xor128;\n sr.xorwow = xorwow;\n sr.xorshift7 = xorshift7;\n sr.xor4096 = xor4096;\n sr.tychei = tychei;\n module.exports = sr;\n }\n});\n\n// (disabled):node_modules/.pnpm/string_decoder@1.3.0/node_modules/string_decoder/lib/string_decoder.js\nvar require_string_decoder = __commonJS({\n \"(disabled):node_modules/.pnpm/string_decoder@1.3.0/node_modules/string_decoder/lib/string_decoder.js\"() {\n }\n});\n\n// (disabled):fs\nvar require_fs = __commonJS({\n \"(disabled):fs\"() {\n }\n});\n\n// (disabled):path\nvar require_path = __commonJS({\n \"(disabled):path\"() {\n }\n});\n\n// (disabled):worker_threads\nvar require_worker_threads = __commonJS({\n \"(disabled):worker_threads\"() {\n }\n});\n\n// (disabled):perf_hooks\nvar require_perf_hooks = __commonJS({\n \"(disabled):perf_hooks\"() {\n }\n});\n\n// (disabled):os\nvar require_os = __commonJS({\n \"(disabled):os\"() {\n }\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js\nvar require_tfjs_backend_wasm_threaded_simd = __commonJS({\n \"node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.js\"(exports, module) {\n var WasmBackendModuleThreadedSimd2 = (() => {\n var _scriptDir = typeof document !== \"undefined\" && document.currentScript ? 
document.currentScript.src : void 0;\n if (typeof __filename !== \"undefined\")\n _scriptDir = _scriptDir || __filename;\n return function(WasmBackendModuleThreadedSimd3) {\n WasmBackendModuleThreadedSimd3 = WasmBackendModuleThreadedSimd3 || {};\n function GROWABLE_HEAP_I8() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAP8;\n }\n function GROWABLE_HEAP_U8() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAPU8;\n }\n function GROWABLE_HEAP_I16() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAP16;\n }\n function GROWABLE_HEAP_U16() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAPU16;\n }\n function GROWABLE_HEAP_I32() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAP32;\n }\n function GROWABLE_HEAP_F32() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAPF32;\n }\n function GROWABLE_HEAP_F64() {\n if (wasmMemory.buffer != buffer2) {\n updateGlobalBufferAndViews(wasmMemory.buffer);\n }\n return HEAPF64;\n }\n var Module = typeof WasmBackendModuleThreadedSimd3 !== \"undefined\" ? WasmBackendModuleThreadedSimd3 : {};\n var readyPromiseResolve, readyPromiseReject;\n Module[\"ready\"] = new Promise(function(resolve, reject) {\n readyPromiseResolve = resolve;\n readyPromiseReject = reject;\n });\n var beforeListeners;\n if (typeof process !== \"undefined\" && process.listeners) {\n beforeListeners = { uncaughtException: process.listeners(\"uncaughtException\"), unhandledRejection: process.listeners(\"unhandledRejection\") };\n }\n var moduleOverrides = Object.assign({}, Module);\n var arguments_ = [];\n var thisProgram = \"./this.program\";\n var quit_ = (status, toThrow) => {\n throw toThrow;\n };\n var ENVIRONMENT_IS_WEB = typeof window === \"object\";\n var ENVIRONMENT_IS_WORKER = typeof importScripts === \"function\";\n var ENVIRONMENT_IS_NODE = typeof process === \"object\" && typeof process.versions === \"object\" && typeof process.versions.node === \"string\";\n var ENVIRONMENT_IS_PTHREAD = Module[\"ENVIRONMENT_IS_PTHREAD\"] || false;\n var scriptDirectory = \"\";\n function locateFile(path) {\n if (Module[\"locateFile\"]) {\n return Module[\"locateFile\"](path, scriptDirectory);\n }\n return scriptDirectory + path;\n }\n var read_, readAsync, readBinary, setWindowTitle;\n function logExceptionOnExit(e2) {\n if (e2 instanceof ExitStatus)\n return;\n let toLog = e2;\n err(\"exiting due to exception: \" + toLog);\n }\n var fs;\n var nodePath;\n var requireNodeFS;\n if (ENVIRONMENT_IS_NODE) {\n if (ENVIRONMENT_IS_WORKER) {\n scriptDirectory = require_path().dirname(scriptDirectory) + \"/\";\n } else {\n scriptDirectory = __dirname + \"/\";\n }\n requireNodeFS = () => {\n if (!nodePath) {\n fs = require_fs();\n nodePath = require_path();\n }\n };\n read_ = function shell_read(filename, binary) {\n requireNodeFS();\n filename = nodePath[\"normalize\"](filename);\n return fs.readFileSync(filename, binary ? 
void 0 : \"utf8\");\n };\n readBinary = (filename) => {\n var ret = read_(filename, true);\n if (!ret.buffer) {\n ret = new Uint8Array(ret);\n }\n return ret;\n };\n readAsync = (filename, onload, onerror) => {\n requireNodeFS();\n filename = nodePath[\"normalize\"](filename);\n fs.readFile(filename, function(err2, data) {\n if (err2)\n onerror(err2);\n else\n onload(data.buffer);\n });\n };\n if (process[\"argv\"].length > 1) {\n thisProgram = process[\"argv\"][1].replace(/\\\\/g, \"/\");\n }\n arguments_ = process[\"argv\"].slice(2);\n process[\"on\"](\"uncaughtException\", function(ex) {\n if (!(ex instanceof ExitStatus)) {\n throw ex;\n }\n });\n process[\"on\"](\"unhandledRejection\", function(reason) {\n throw reason;\n });\n quit_ = (status, toThrow) => {\n if (keepRuntimeAlive()) {\n process[\"exitCode\"] = status;\n throw toThrow;\n }\n logExceptionOnExit(toThrow);\n process[\"exit\"](status);\n };\n Module[\"inspect\"] = function() {\n return \"[Emscripten Module object]\";\n };\n let nodeWorkerThreads;\n try {\n nodeWorkerThreads = require_worker_threads();\n } catch (e2) {\n console.error('The \"worker_threads\" module is not supported in this node.js build - perhaps a newer version is needed?');\n throw e2;\n }\n global.Worker = nodeWorkerThreads.Worker;\n } else if (ENVIRONMENT_IS_WEB || ENVIRONMENT_IS_WORKER) {\n if (ENVIRONMENT_IS_WORKER) {\n scriptDirectory = self.location.href;\n } else if (typeof document !== \"undefined\" && document.currentScript) {\n scriptDirectory = document.currentScript.src;\n }\n if (typeof _scriptDir !== \"undefined\" && _scriptDir) {\n scriptDirectory = _scriptDir;\n }\n if (scriptDirectory.indexOf(\"blob:\") !== 0) {\n scriptDirectory = scriptDirectory.substr(0, scriptDirectory.replace(/[?#].*/, \"\").lastIndexOf(\"/\") + 1);\n } else {\n scriptDirectory = \"\";\n }\n if (!ENVIRONMENT_IS_NODE) {\n read_ = (url) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, false);\n xhr.send(null);\n return xhr.responseText;\n };\n if (ENVIRONMENT_IS_WORKER) {\n readBinary = (url) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, false);\n xhr.responseType = \"arraybuffer\";\n xhr.send(null);\n return new Uint8Array(xhr.response);\n };\n }\n readAsync = (url, onload, onerror) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, true);\n xhr.responseType = \"arraybuffer\";\n xhr.onload = () => {\n if (xhr.status == 200 || xhr.status == 0 && xhr.response) {\n onload(xhr.response);\n return;\n }\n onerror();\n };\n xhr.onerror = onerror;\n xhr.send(null);\n };\n }\n setWindowTitle = (title) => document.title = title;\n } else {\n }\n if (ENVIRONMENT_IS_NODE) {\n if (typeof performance === \"undefined\") {\n global.performance = require_perf_hooks().performance;\n }\n }\n var defaultPrint = console.log.bind(console);\n var defaultPrintErr = console.warn.bind(console);\n if (ENVIRONMENT_IS_NODE) {\n requireNodeFS();\n defaultPrint = (str) => fs.writeSync(1, str + \"\\n\");\n defaultPrintErr = (str) => fs.writeSync(2, str + \"\\n\");\n }\n var out = Module[\"print\"] || defaultPrint;\n var err = Module[\"printErr\"] || defaultPrintErr;\n Object.assign(Module, moduleOverrides);\n moduleOverrides = null;\n if (Module[\"arguments\"])\n arguments_ = Module[\"arguments\"];\n if (Module[\"thisProgram\"])\n thisProgram = Module[\"thisProgram\"];\n if (Module[\"quit\"])\n quit_ = Module[\"quit\"];\n var POINTER_SIZE = 4;\n function warnOnce(text) {\n if (!warnOnce.shown)\n warnOnce.shown = {};\n if (!warnOnce.shown[text]) {\n 
warnOnce.shown[text] = 1;\n err(text);\n }\n }\n function convertJsFunctionToWasm(func2, sig) {\n if (typeof WebAssembly.Function === \"function\") {\n var typeNames = { \"i\": \"i32\", \"j\": \"i64\", \"f\": \"f32\", \"d\": \"f64\" };\n var type = { parameters: [], results: sig[0] == \"v\" ? [] : [typeNames[sig[0]]] };\n for (var i2 = 1; i2 < sig.length; ++i2) {\n type.parameters.push(typeNames[sig[i2]]);\n }\n return new WebAssembly.Function(type, func2);\n }\n var typeSection = [1, 0, 1, 96];\n var sigRet = sig.slice(0, 1);\n var sigParam = sig.slice(1);\n var typeCodes = { \"i\": 127, \"j\": 126, \"f\": 125, \"d\": 124 };\n typeSection.push(sigParam.length);\n for (var i2 = 0; i2 < sigParam.length; ++i2) {\n typeSection.push(typeCodes[sigParam[i2]]);\n }\n if (sigRet == \"v\") {\n typeSection.push(0);\n } else {\n typeSection = typeSection.concat([1, typeCodes[sigRet]]);\n }\n typeSection[1] = typeSection.length - 2;\n var bytes = new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0].concat(typeSection, [2, 7, 1, 1, 101, 1, 102, 0, 0, 7, 5, 1, 1, 102, 0, 0]));\n var module2 = new WebAssembly.Module(bytes);\n var instance = new WebAssembly.Instance(module2, { \"e\": { \"f\": func2 } });\n var wrappedFunc = instance.exports[\"f\"];\n return wrappedFunc;\n }\n var freeTableIndexes = [];\n var functionsInTableMap;\n function getEmptyTableSlot() {\n if (freeTableIndexes.length) {\n return freeTableIndexes.pop();\n }\n try {\n wasmTable.grow(1);\n } catch (err2) {\n if (!(err2 instanceof RangeError)) {\n throw err2;\n }\n throw \"Unable to grow wasm table. Set ALLOW_TABLE_GROWTH.\";\n }\n return wasmTable.length - 1;\n }\n function updateTableMap(offset, count2) {\n for (var i2 = offset; i2 < offset + count2; i2++) {\n var item = getWasmTableEntry(i2);\n if (item) {\n functionsInTableMap.set(item, i2);\n }\n }\n }\n var tempRet0 = 0;\n var setTempRet0 = (value) => {\n tempRet0 = value;\n };\n var Atomics_load = Atomics.load;\n var Atomics_store = Atomics.store;\n var Atomics_compareExchange = Atomics.compareExchange;\n var wasmBinary;\n if (Module[\"wasmBinary\"])\n wasmBinary = Module[\"wasmBinary\"];\n var noExitRuntime = Module[\"noExitRuntime\"] || true;\n if (typeof WebAssembly !== \"object\") {\n abort(\"no native wasm support detected\");\n }\n var wasmMemory;\n var wasmModule;\n var ABORT = false;\n var EXITSTATUS;\n function assert3(condition, text) {\n if (!condition) {\n abort(text);\n }\n }\n function getCFunc(ident) {\n var func2 = Module[\"_\" + ident];\n return func2;\n }\n function ccall(ident, returnType, argTypes, args, opts) {\n var toC = { \"string\": function(str) {\n var ret2 = 0;\n if (str !== null && str !== void 0 && str !== 0) {\n var len = (str.length << 2) + 1;\n ret2 = stackAlloc(len);\n stringToUTF8(str, ret2, len);\n }\n return ret2;\n }, \"array\": function(arr) {\n var ret2 = stackAlloc(arr.length);\n writeArrayToMemory(arr, ret2);\n return ret2;\n } };\n function convertReturnValue(ret2) {\n if (returnType === \"string\")\n return UTF8ToString(ret2);\n if (returnType === \"boolean\")\n return Boolean(ret2);\n return ret2;\n }\n var func2 = getCFunc(ident);\n var cArgs = [];\n var stack2 = 0;\n if (args) {\n for (var i2 = 0; i2 < args.length; i2++) {\n var converter = toC[argTypes[i2]];\n if (converter) {\n if (stack2 === 0)\n stack2 = stackSave();\n cArgs[i2] = converter(args[i2]);\n } else {\n cArgs[i2] = args[i2];\n }\n }\n }\n var ret = func2.apply(null, cArgs);\n function onDone(ret2) {\n if (stack2 !== 0)\n stackRestore(stack2);\n return 
convertReturnValue(ret2);\n }\n ret = onDone(ret);\n return ret;\n }\n function cwrap(ident, returnType, argTypes, opts) {\n argTypes = argTypes || [];\n var numericArgs = argTypes.every(function(type) {\n return type === \"number\";\n });\n var numericRet = returnType !== \"string\";\n if (numericRet && numericArgs && !opts) {\n return getCFunc(ident);\n }\n return function() {\n return ccall(ident, returnType, argTypes, arguments, opts);\n };\n }\n var ALLOC_STACK = 1;\n function TextDecoderWrapper(encoding) {\n var textDecoder = new TextDecoder(encoding);\n this.decode = (data) => {\n if (data.buffer instanceof SharedArrayBuffer) {\n data = new Uint8Array(data);\n }\n return textDecoder.decode.call(textDecoder, data);\n };\n }\n var UTF8Decoder = typeof TextDecoder !== \"undefined\" ? new TextDecoderWrapper(\"utf8\") : void 0;\n function UTF8ArrayToString(heap, idx, maxBytesToRead) {\n var endIdx = idx + maxBytesToRead;\n var endPtr = idx;\n while (heap[endPtr] && !(endPtr >= endIdx))\n ++endPtr;\n if (endPtr - idx > 16 && heap.subarray && UTF8Decoder) {\n return UTF8Decoder.decode(heap.subarray(idx, endPtr));\n } else {\n var str = \"\";\n while (idx < endPtr) {\n var u0 = heap[idx++];\n if (!(u0 & 128)) {\n str += String.fromCharCode(u0);\n continue;\n }\n var u1 = heap[idx++] & 63;\n if ((u0 & 224) == 192) {\n str += String.fromCharCode((u0 & 31) << 6 | u1);\n continue;\n }\n var u2 = heap[idx++] & 63;\n if ((u0 & 240) == 224) {\n u0 = (u0 & 15) << 12 | u1 << 6 | u2;\n } else {\n u0 = (u0 & 7) << 18 | u1 << 12 | u2 << 6 | heap[idx++] & 63;\n }\n if (u0 < 65536) {\n str += String.fromCharCode(u0);\n } else {\n var ch = u0 - 65536;\n str += String.fromCharCode(55296 | ch >> 10, 56320 | ch & 1023);\n }\n }\n }\n return str;\n }\n function UTF8ToString(ptr, maxBytesToRead) {\n return ptr ? UTF8ArrayToString(GROWABLE_HEAP_U8(), ptr, maxBytesToRead) : \"\";\n }\n function stringToUTF8Array(str, heap, outIdx, maxBytesToWrite) {\n if (!(maxBytesToWrite > 0))\n return 0;\n var startIdx = outIdx;\n var endIdx = outIdx + maxBytesToWrite - 1;\n for (var i2 = 0; i2 < str.length; ++i2) {\n var u = str.charCodeAt(i2);\n if (u >= 55296 && u <= 57343) {\n var u1 = str.charCodeAt(++i2);\n u = 65536 + ((u & 1023) << 10) | u1 & 1023;\n }\n if (u <= 127) {\n if (outIdx >= endIdx)\n break;\n heap[outIdx++] = u;\n } else if (u <= 2047) {\n if (outIdx + 1 >= endIdx)\n break;\n heap[outIdx++] = 192 | u >> 6;\n heap[outIdx++] = 128 | u & 63;\n } else if (u <= 65535) {\n if (outIdx + 2 >= endIdx)\n break;\n heap[outIdx++] = 224 | u >> 12;\n heap[outIdx++] = 128 | u >> 6 & 63;\n heap[outIdx++] = 128 | u & 63;\n } else {\n if (outIdx + 3 >= endIdx)\n break;\n heap[outIdx++] = 240 | u >> 18;\n heap[outIdx++] = 128 | u >> 12 & 63;\n heap[outIdx++] = 128 | u >> 6 & 63;\n heap[outIdx++] = 128 | u & 63;\n }\n }\n heap[outIdx] = 0;\n return outIdx - startIdx;\n }\n function stringToUTF8(str, outPtr, maxBytesToWrite) {\n return stringToUTF8Array(str, GROWABLE_HEAP_U8(), outPtr, maxBytesToWrite);\n }\n function lengthBytesUTF8(str) {\n var len = 0;\n for (var i2 = 0; i2 < str.length; ++i2) {\n var u = str.charCodeAt(i2);\n if (u >= 55296 && u <= 57343)\n u = 65536 + ((u & 1023) << 10) | str.charCodeAt(++i2) & 1023;\n if (u <= 127)\n ++len;\n else if (u <= 2047)\n len += 2;\n else if (u <= 65535)\n len += 3;\n else\n len += 4;\n }\n return len;\n }\n var UTF16Decoder = typeof TextDecoder !== \"undefined\" ? 
new TextDecoderWrapper(\"utf-16le\") : void 0;\n function writeArrayToMemory(array2, buffer3) {\n GROWABLE_HEAP_I8().set(array2, buffer3);\n }\n function writeAsciiToMemory(str, buffer3, dontAddNull) {\n for (var i2 = 0; i2 < str.length; ++i2) {\n GROWABLE_HEAP_I8()[buffer3++ >> 0] = str.charCodeAt(i2);\n }\n if (!dontAddNull)\n GROWABLE_HEAP_I8()[buffer3 >> 0] = 0;\n }\n function alignUp(x, multiple) {\n if (x % multiple > 0) {\n x += multiple - x % multiple;\n }\n return x;\n }\n var buffer2, HEAP8, HEAPU8, HEAP16, HEAPU16, HEAP32, HEAPU32, HEAPF32, HEAPF64;\n if (ENVIRONMENT_IS_PTHREAD) {\n buffer2 = Module[\"buffer\"];\n }\n function updateGlobalBufferAndViews(buf) {\n buffer2 = buf;\n Module[\"HEAP8\"] = HEAP8 = new Int8Array(buf);\n Module[\"HEAP16\"] = HEAP16 = new Int16Array(buf);\n Module[\"HEAP32\"] = HEAP32 = new Int32Array(buf);\n Module[\"HEAPU8\"] = HEAPU8 = new Uint8Array(buf);\n Module[\"HEAPU16\"] = HEAPU16 = new Uint16Array(buf);\n Module[\"HEAPU32\"] = HEAPU32 = new Uint32Array(buf);\n Module[\"HEAPF32\"] = HEAPF32 = new Float32Array(buf);\n Module[\"HEAPF64\"] = HEAPF64 = new Float64Array(buf);\n }\n var INITIAL_MEMORY = Module[\"INITIAL_MEMORY\"] || 16777216;\n if (ENVIRONMENT_IS_PTHREAD) {\n wasmMemory = Module[\"wasmMemory\"];\n buffer2 = Module[\"buffer\"];\n } else {\n if (Module[\"wasmMemory\"]) {\n wasmMemory = Module[\"wasmMemory\"];\n } else {\n wasmMemory = new WebAssembly.Memory({ \"initial\": INITIAL_MEMORY / 65536, \"maximum\": 2147483648 / 65536, \"shared\": true });\n if (!(wasmMemory.buffer instanceof SharedArrayBuffer)) {\n err(\"requested a shared WebAssembly.Memory but the returned buffer is not a SharedArrayBuffer, indicating that while the browser has SharedArrayBuffer it does not have WebAssembly threads support - you may need to set a flag\");\n if (ENVIRONMENT_IS_NODE) {\n console.log(\"(on node you may need: --experimental-wasm-threads --experimental-wasm-bulk-memory and also use a recent version)\");\n }\n throw Error(\"bad memory\");\n }\n }\n }\n if (wasmMemory) {\n buffer2 = wasmMemory.buffer;\n }\n INITIAL_MEMORY = buffer2.byteLength;\n updateGlobalBufferAndViews(buffer2);\n var wasmTable;\n var __ATPRERUN__ = [];\n var __ATINIT__ = [];\n var __ATEXIT__ = [];\n var __ATPOSTRUN__ = [];\n var runtimeInitialized = false;\n var runtimeExited = false;\n var runtimeKeepaliveCounter = 0;\n function keepRuntimeAlive() {\n return noExitRuntime || runtimeKeepaliveCounter > 0;\n }\n function preRun() {\n if (Module[\"preRun\"]) {\n if (typeof Module[\"preRun\"] == \"function\")\n Module[\"preRun\"] = [Module[\"preRun\"]];\n while (Module[\"preRun\"].length) {\n addOnPreRun(Module[\"preRun\"].shift());\n }\n }\n callRuntimeCallbacks(__ATPRERUN__);\n }\n function initRuntime() {\n runtimeInitialized = true;\n if (ENVIRONMENT_IS_PTHREAD)\n return;\n callRuntimeCallbacks(__ATINIT__);\n }\n function exitRuntime() {\n if (ENVIRONMENT_IS_PTHREAD)\n return;\n PThread.terminateAllThreads();\n runtimeExited = true;\n }\n function postRun() {\n if (ENVIRONMENT_IS_PTHREAD)\n return;\n if (Module[\"postRun\"]) {\n if (typeof Module[\"postRun\"] == \"function\")\n Module[\"postRun\"] = [Module[\"postRun\"]];\n while (Module[\"postRun\"].length) {\n addOnPostRun(Module[\"postRun\"].shift());\n }\n }\n callRuntimeCallbacks(__ATPOSTRUN__);\n }\n function addOnPreRun(cb) {\n __ATPRERUN__.unshift(cb);\n }\n function addOnInit(cb) {\n __ATINIT__.unshift(cb);\n }\n function addOnPostRun(cb) {\n __ATPOSTRUN__.unshift(cb);\n }\n var runDependencies = 0;\n var 
runDependencyWatcher = null;\n var dependenciesFulfilled = null;\n function addRunDependency(id) {\n runDependencies++;\n if (Module[\"monitorRunDependencies\"]) {\n Module[\"monitorRunDependencies\"](runDependencies);\n }\n }\n function removeRunDependency(id) {\n runDependencies--;\n if (Module[\"monitorRunDependencies\"]) {\n Module[\"monitorRunDependencies\"](runDependencies);\n }\n if (runDependencies == 0) {\n if (runDependencyWatcher !== null) {\n clearInterval(runDependencyWatcher);\n runDependencyWatcher = null;\n }\n if (dependenciesFulfilled) {\n var callback = dependenciesFulfilled;\n dependenciesFulfilled = null;\n callback();\n }\n }\n }\n Module[\"preloadedImages\"] = {};\n Module[\"preloadedAudios\"] = {};\n function abort(what) {\n if (ENVIRONMENT_IS_PTHREAD) {\n postMessage({ \"cmd\": \"onAbort\", \"arg\": what });\n } else {\n if (Module[\"onAbort\"]) {\n Module[\"onAbort\"](what);\n }\n }\n what = \"Aborted(\" + what + \")\";\n err(what);\n ABORT = true;\n EXITSTATUS = 1;\n what += \". Build with -s ASSERTIONS=1 for more info.\";\n var e2 = new WebAssembly.RuntimeError(what);\n readyPromiseReject(e2);\n throw e2;\n }\n var dataURIPrefix = \"data:application/octet-stream;base64,\";\n function isDataURI(filename) {\n return filename.startsWith(dataURIPrefix);\n }\n function isFileURI(filename) {\n return filename.startsWith(\"file://\");\n }\n var wasmBinaryFile;\n wasmBinaryFile = \"tfjs-backend-wasm-threaded-simd.wasm\";\n if (!isDataURI(wasmBinaryFile)) {\n wasmBinaryFile = locateFile(wasmBinaryFile);\n }\n function getBinary(file) {\n try {\n if (file == wasmBinaryFile && wasmBinary) {\n return new Uint8Array(wasmBinary);\n }\n if (readBinary) {\n return readBinary(file);\n } else {\n throw \"both async and sync fetching of the wasm failed\";\n }\n } catch (err2) {\n abort(err2);\n }\n }\n function getBinaryPromise() {\n if (!wasmBinary && (ENVIRONMENT_IS_WEB || ENVIRONMENT_IS_WORKER)) {\n if (typeof fetch === \"function\" && !isFileURI(wasmBinaryFile)) {\n return fetch(wasmBinaryFile, { credentials: \"same-origin\" }).then(function(response) {\n if (!response[\"ok\"]) {\n throw \"failed to load wasm binary file at '\" + wasmBinaryFile + \"'\";\n }\n return response[\"arrayBuffer\"]();\n }).catch(function() {\n return getBinary(wasmBinaryFile);\n });\n } else {\n if (readAsync) {\n return new Promise(function(resolve, reject) {\n readAsync(wasmBinaryFile, function(response) {\n resolve(new Uint8Array(response));\n }, reject);\n });\n }\n }\n }\n return Promise.resolve().then(function() {\n return getBinary(wasmBinaryFile);\n });\n }\n function createWasm() {\n var info = { \"env\": asmLibraryArg, \"wasi_snapshot_preview1\": asmLibraryArg };\n function receiveInstance(instance, module2) {\n var exports3 = instance.exports;\n Module[\"asm\"] = exports3;\n registerTlsInit(Module[\"asm\"][\"emscripten_tls_init\"]);\n wasmTable = Module[\"asm\"][\"__indirect_function_table\"];\n addOnInit(Module[\"asm\"][\"__wasm_call_ctors\"]);\n wasmModule = module2;\n if (!ENVIRONMENT_IS_PTHREAD) {\n var numWorkersToLoad = PThread.unusedWorkers.length;\n PThread.unusedWorkers.forEach(function(w) {\n PThread.loadWasmModuleToWorker(w, function() {\n if (!--numWorkersToLoad)\n removeRunDependency(\"wasm-instantiate\");\n });\n });\n }\n }\n if (!ENVIRONMENT_IS_PTHREAD) {\n addRunDependency(\"wasm-instantiate\");\n }\n function receiveInstantiationResult(result) {\n receiveInstance(result[\"instance\"], result[\"module\"]);\n }\n function instantiateArrayBuffer(receiver) {\n return 
getBinaryPromise().then(function(binary) {\n return WebAssembly.instantiate(binary, info);\n }).then(function(instance) {\n return instance;\n }).then(receiver, function(reason) {\n err(\"failed to asynchronously prepare wasm: \" + reason);\n abort(reason);\n });\n }\n function instantiateAsync() {\n if (!wasmBinary && typeof WebAssembly.instantiateStreaming === \"function\" && !isDataURI(wasmBinaryFile) && !isFileURI(wasmBinaryFile) && typeof fetch === \"function\") {\n return fetch(wasmBinaryFile, { credentials: \"same-origin\" }).then(function(response) {\n var result = WebAssembly.instantiateStreaming(response, info);\n return result.then(receiveInstantiationResult, function(reason) {\n err(\"wasm streaming compile failed: \" + reason);\n err(\"falling back to ArrayBuffer instantiation\");\n return instantiateArrayBuffer(receiveInstantiationResult);\n });\n });\n } else {\n return instantiateArrayBuffer(receiveInstantiationResult);\n }\n }\n if (Module[\"instantiateWasm\"]) {\n try {\n var exports2 = Module[\"instantiateWasm\"](info, receiveInstance);\n return exports2;\n } catch (e2) {\n err(\"Module.instantiateWasm callback failed with error: \" + e2);\n return false;\n }\n }\n instantiateAsync().catch(readyPromiseReject);\n return {};\n }\n var tempDouble;\n var tempI64;\n var ASM_CONSTS = {};\n function callRuntimeCallbacks(callbacks2) {\n while (callbacks2.length > 0) {\n var callback = callbacks2.shift();\n if (typeof callback == \"function\") {\n callback(Module);\n continue;\n }\n var func2 = callback.func;\n if (typeof func2 === \"number\") {\n if (callback.arg === void 0) {\n getWasmTableEntry(func2)();\n } else {\n getWasmTableEntry(func2)(callback.arg);\n }\n } else {\n func2(callback.arg === void 0 ? null : callback.arg);\n }\n }\n }\n function withStackSave(f) {\n var stack2 = stackSave();\n var ret = f();\n stackRestore(stack2);\n return ret;\n }\n function demangle(func2) {\n return func2;\n }\n function demangleAll(text) {\n var regex = /\\b_Z[\\w\\d_]+/g;\n return text.replace(regex, function(x) {\n var y = demangle(x);\n return x === y ? 
x : y + \" [\" + x + \"]\";\n });\n }\n function killThread(pthread_ptr) {\n GROWABLE_HEAP_I32()[pthread_ptr >> 2] = 0;\n var pthread = PThread.pthreads[pthread_ptr];\n delete PThread.pthreads[pthread_ptr];\n pthread.worker.terminate();\n __emscripten_thread_free_data(pthread_ptr);\n PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(pthread.worker), 1);\n pthread.worker.pthread = void 0;\n }\n function cancelThread(pthread_ptr) {\n var pthread = PThread.pthreads[pthread_ptr];\n pthread.worker.postMessage({ \"cmd\": \"cancel\" });\n }\n function cleanupThread(pthread_ptr) {\n var pthread = PThread.pthreads[pthread_ptr];\n if (pthread) {\n GROWABLE_HEAP_I32()[pthread_ptr >> 2] = 0;\n var worker = pthread.worker;\n PThread.returnWorkerToPool(worker);\n }\n }\n function _exit(status) {\n exit(status);\n }\n function handleException(e2) {\n if (e2 instanceof ExitStatus || e2 == \"unwind\") {\n return EXITSTATUS;\n }\n quit_(1, e2);\n }\n var PThread = { unusedWorkers: [], runningWorkers: [], tlsInitFunctions: [], init: function() {\n if (ENVIRONMENT_IS_PTHREAD) {\n PThread.initWorker();\n } else {\n PThread.initMainThread();\n }\n }, initMainThread: function() {\n var pthreadPoolSize = 8;\n for (var i2 = 0; i2 < pthreadPoolSize; ++i2) {\n PThread.allocateUnusedWorker();\n }\n }, initWorker: function() {\n noExitRuntime = false;\n }, pthreads: {}, setExitStatus: function(status) {\n EXITSTATUS = status;\n }, terminateAllThreads: function() {\n for (var t2 in PThread.pthreads) {\n var pthread = PThread.pthreads[t2];\n if (pthread && pthread.worker) {\n PThread.returnWorkerToPool(pthread.worker);\n }\n }\n for (var i2 = 0; i2 < PThread.unusedWorkers.length; ++i2) {\n var worker = PThread.unusedWorkers[i2];\n worker.terminate();\n }\n PThread.unusedWorkers = [];\n }, returnWorkerToPool: function(worker) {\n PThread.runWithoutMainThreadQueuedCalls(function() {\n delete PThread.pthreads[worker.pthread.threadInfoStruct];\n PThread.unusedWorkers.push(worker);\n PThread.runningWorkers.splice(PThread.runningWorkers.indexOf(worker), 1);\n __emscripten_thread_free_data(worker.pthread.threadInfoStruct);\n worker.pthread = void 0;\n });\n }, runWithoutMainThreadQueuedCalls: function(func2) {\n GROWABLE_HEAP_I32()[__emscripten_allow_main_runtime_queued_calls >> 2] = 0;\n try {\n func2();\n } finally {\n GROWABLE_HEAP_I32()[__emscripten_allow_main_runtime_queued_calls >> 2] = 1;\n }\n }, receiveObjectTransfer: function(data) {\n }, threadInit: function() {\n for (var i2 in PThread.tlsInitFunctions) {\n PThread.tlsInitFunctions[i2]();\n }\n }, loadWasmModuleToWorker: function(worker, onFinishedLoading) {\n worker.onmessage = (e2) => {\n var d = e2[\"data\"];\n var cmd = d[\"cmd\"];\n if (worker.pthread)\n PThread.currentProxiedOperationCallerThread = worker.pthread.threadInfoStruct;\n if (d[\"targetThread\"] && d[\"targetThread\"] != _pthread_self()) {\n var thread = PThread.pthreads[d.targetThread];\n if (thread) {\n thread.worker.postMessage(d, d[\"transferList\"]);\n } else {\n err('Internal error! 
Worker sent a message \"' + cmd + '\" to target pthread ' + d[\"targetThread\"] + \", but that thread no longer exists!\");\n }\n PThread.currentProxiedOperationCallerThread = void 0;\n return;\n }\n if (cmd === \"processQueuedMainThreadWork\") {\n _emscripten_main_thread_process_queued_calls();\n } else if (cmd === \"spawnThread\") {\n spawnThread(d);\n } else if (cmd === \"cleanupThread\") {\n cleanupThread(d[\"thread\"]);\n } else if (cmd === \"killThread\") {\n killThread(d[\"thread\"]);\n } else if (cmd === \"cancelThread\") {\n cancelThread(d[\"thread\"]);\n } else if (cmd === \"loaded\") {\n worker.loaded = true;\n if (onFinishedLoading)\n onFinishedLoading(worker);\n if (worker.runPthread) {\n worker.runPthread();\n delete worker.runPthread;\n }\n } else if (cmd === \"print\") {\n out(\"Thread \" + d[\"threadId\"] + \": \" + d[\"text\"]);\n } else if (cmd === \"printErr\") {\n err(\"Thread \" + d[\"threadId\"] + \": \" + d[\"text\"]);\n } else if (cmd === \"alert\") {\n alert(\"Thread \" + d[\"threadId\"] + \": \" + d[\"text\"]);\n } else if (d.target === \"setimmediate\") {\n worker.postMessage(d);\n } else if (cmd === \"onAbort\") {\n if (Module[\"onAbort\"]) {\n Module[\"onAbort\"](d[\"arg\"]);\n }\n } else {\n err(\"worker sent an unknown command \" + cmd);\n }\n PThread.currentProxiedOperationCallerThread = void 0;\n };\n worker.onerror = (e2) => {\n var message = \"worker sent an error!\";\n err(message + \" \" + e2.filename + \":\" + e2.lineno + \": \" + e2.message);\n throw e2;\n };\n if (ENVIRONMENT_IS_NODE) {\n worker.on(\"message\", function(data) {\n worker.onmessage({ data });\n });\n worker.on(\"error\", function(e2) {\n worker.onerror(e2);\n });\n worker.on(\"detachedExit\", function() {\n });\n }\n worker.postMessage({ \"cmd\": \"load\", \"urlOrBlob\": Module[\"mainScriptUrlOrBlob\"] || _scriptDir, \"wasmMemory\": wasmMemory, \"wasmModule\": wasmModule });\n }, allocateUnusedWorker: function() {\n var pthreadMainJs = locateFile(\"tfjs-backend-wasm-threaded-simd.worker.js\");\n PThread.unusedWorkers.push(new Worker(pthreadMainJs));\n }, getNewWorker: function() {\n if (PThread.unusedWorkers.length == 0) {\n PThread.allocateUnusedWorker();\n PThread.loadWasmModuleToWorker(PThread.unusedWorkers[0]);\n }\n return PThread.unusedWorkers.pop();\n } };\n function establishStackSpace() {\n var pthread_ptr = _pthread_self();\n var stackTop = GROWABLE_HEAP_I32()[pthread_ptr + 44 >> 2];\n var stackSize = GROWABLE_HEAP_I32()[pthread_ptr + 48 >> 2];\n var stackMax = stackTop - stackSize;\n _emscripten_stack_set_limits(stackTop, stackMax);\n stackRestore(stackTop);\n }\n Module[\"establishStackSpace\"] = establishStackSpace;\n function exitOnMainThread(returnCode) {\n if (ENVIRONMENT_IS_PTHREAD)\n return _emscripten_proxy_to_main_thread_js(1, 0, returnCode);\n try {\n _exit(returnCode);\n } catch (e2) {\n handleException(e2);\n }\n }\n var wasmTableMirror = [];\n function getWasmTableEntry(funcPtr) {\n var func2 = wasmTableMirror[funcPtr];\n if (!func2) {\n if (funcPtr >= wasmTableMirror.length)\n wasmTableMirror.length = funcPtr + 1;\n wasmTableMirror[funcPtr] = func2 = wasmTable.get(funcPtr);\n }\n return func2;\n }\n function invokeEntryPoint(ptr, arg) {\n return getWasmTableEntry(ptr)(arg);\n }\n Module[\"invokeEntryPoint\"] = invokeEntryPoint;\n function jsStackTrace() {\n var error = new Error();\n if (!error.stack) {\n try {\n throw new Error();\n } catch (e2) {\n error = e2;\n }\n if (!error.stack) {\n return \"(no stack trace available)\";\n }\n }\n return 
error.stack.toString();\n }\n function registerTlsInit(tlsInitFunc, moduleExports, metadata) {\n PThread.tlsInitFunctions.push(tlsInitFunc);\n }\n function setWasmTableEntry(idx, func2) {\n wasmTable.set(idx, func2);\n wasmTableMirror[idx] = func2;\n }\n var _emscripten_get_now;\n if (ENVIRONMENT_IS_NODE) {\n _emscripten_get_now = () => {\n var t2 = process[\"hrtime\"]();\n return t2[0] * 1e3 + t2[1] / 1e6;\n };\n } else if (ENVIRONMENT_IS_PTHREAD) {\n _emscripten_get_now = () => performance.now() - Module[\"__performance_now_clock_drift\"];\n } else\n _emscripten_get_now = () => performance.now();\n var _emscripten_get_now_is_monotonic = true;\n function setErrNo(value) {\n GROWABLE_HEAP_I32()[___errno_location() >> 2] = value;\n return value;\n }\n function _clock_gettime(clk_id, tp) {\n var now2;\n if (clk_id === 0) {\n now2 = Date.now();\n } else if ((clk_id === 1 || clk_id === 4) && _emscripten_get_now_is_monotonic) {\n now2 = _emscripten_get_now();\n } else {\n setErrNo(28);\n return -1;\n }\n GROWABLE_HEAP_I32()[tp >> 2] = now2 / 1e3 | 0;\n GROWABLE_HEAP_I32()[tp + 4 >> 2] = now2 % 1e3 * 1e3 * 1e3 | 0;\n return 0;\n }\n function ___clock_gettime(a0, a12) {\n return _clock_gettime(a0, a12);\n }\n function ___emscripten_init_main_thread_js(tb) {\n __emscripten_thread_init(tb, !ENVIRONMENT_IS_WORKER, 1, !ENVIRONMENT_IS_WEB);\n PThread.threadInit();\n }\n function ___emscripten_thread_cleanup(thread) {\n if (!ENVIRONMENT_IS_PTHREAD)\n cleanupThread(thread);\n else\n postMessage({ \"cmd\": \"cleanupThread\", \"thread\": thread });\n }\n function spawnThread(threadParams) {\n var worker = PThread.getNewWorker();\n if (!worker) {\n return 6;\n }\n PThread.runningWorkers.push(worker);\n var pthread = PThread.pthreads[threadParams.pthread_ptr] = { worker, threadInfoStruct: threadParams.pthread_ptr };\n worker.pthread = pthread;\n var msg = { \"cmd\": \"run\", \"start_routine\": threadParams.startRoutine, \"arg\": threadParams.arg, \"threadInfoStruct\": threadParams.pthread_ptr };\n worker.runPthread = () => {\n msg.time = performance.now();\n worker.postMessage(msg, threadParams.transferList);\n };\n if (worker.loaded) {\n worker.runPthread();\n delete worker.runPthread;\n }\n return 0;\n }\n function ___pthread_create_js(pthread_ptr, attr, start_routine, arg) {\n if (typeof SharedArrayBuffer === \"undefined\") {\n err(\"Current environment does not support SharedArrayBuffer, pthreads are not available!\");\n return 6;\n }\n var transferList = [];\n var error = 0;\n if (ENVIRONMENT_IS_PTHREAD && (transferList.length === 0 || error)) {\n return _emscripten_sync_run_in_main_thread_4(687865856, pthread_ptr, attr, start_routine, arg);\n }\n if (error)\n return error;\n var threadParams = { startRoutine: start_routine, pthread_ptr, arg, transferList };\n if (ENVIRONMENT_IS_PTHREAD) {\n threadParams.cmd = \"spawnThread\";\n postMessage(threadParams, transferList);\n return 0;\n }\n return spawnThread(threadParams);\n }\n function __emscripten_default_pthread_stack_size() {\n return 2097152;\n }\n function __emscripten_notify_thread_queue(targetThreadId, mainThreadId) {\n if (targetThreadId == mainThreadId) {\n postMessage({ \"cmd\": \"processQueuedMainThreadWork\" });\n } else if (ENVIRONMENT_IS_PTHREAD) {\n postMessage({ \"targetThread\": targetThreadId, \"cmd\": \"processThreadQueue\" });\n } else {\n var pthread = PThread.pthreads[targetThreadId];\n var worker = pthread && pthread.worker;\n if (!worker) {\n return;\n }\n worker.postMessage({ \"cmd\": \"processThreadQueue\" });\n }\n return 1;\n 
}\n function _abort() {\n abort(\"\");\n }\n function _emscripten_check_blocking_allowed() {\n if (ENVIRONMENT_IS_NODE)\n return;\n if (ENVIRONMENT_IS_WORKER)\n return;\n warnOnce(\"Blocking on the main thread is very dangerous, see https://emscripten.org/docs/porting/pthreads.html#blocking-on-the-main-browser-thread\");\n }\n function _emscripten_get_heap_max() {\n return 2147483648;\n }\n function _emscripten_memcpy_big(dest, src, num) {\n GROWABLE_HEAP_U8().copyWithin(dest, src, src + num);\n }\n function _emscripten_num_logical_cores() {\n if (ENVIRONMENT_IS_NODE)\n return require_os().cpus().length;\n return navigator[\"hardwareConcurrency\"];\n }\n function _emscripten_proxy_to_main_thread_js(index, sync) {\n var numCallArgs = arguments.length - 2;\n var outerArgs = arguments;\n return withStackSave(function() {\n var serializedNumCallArgs = numCallArgs;\n var args = stackAlloc(serializedNumCallArgs * 8);\n var b = args >> 3;\n for (var i2 = 0; i2 < numCallArgs; i2++) {\n var arg = outerArgs[2 + i2];\n GROWABLE_HEAP_F64()[b + i2] = arg;\n }\n return _emscripten_run_in_main_runtime_thread_js(index, serializedNumCallArgs, args, sync);\n });\n }\n var _emscripten_receive_on_main_thread_js_callArgs = [];\n function _emscripten_receive_on_main_thread_js(index, numCallArgs, args) {\n _emscripten_receive_on_main_thread_js_callArgs.length = numCallArgs;\n var b = args >> 3;\n for (var i2 = 0; i2 < numCallArgs; i2++) {\n _emscripten_receive_on_main_thread_js_callArgs[i2] = GROWABLE_HEAP_F64()[b + i2];\n }\n var isEmAsmConst = index < 0;\n var func2 = !isEmAsmConst ? proxiedFunctionTable[index] : ASM_CONSTS[-index - 1];\n return func2.apply(null, _emscripten_receive_on_main_thread_js_callArgs);\n }\n function emscripten_realloc_buffer(size) {\n try {\n wasmMemory.grow(size - buffer2.byteLength + 65535 >>> 16);\n updateGlobalBufferAndViews(wasmMemory.buffer);\n return 1;\n } catch (e2) {\n }\n }\n function _emscripten_resize_heap(requestedSize) {\n var oldSize = GROWABLE_HEAP_U8().length;\n requestedSize = requestedSize >>> 0;\n if (requestedSize <= oldSize) {\n return false;\n }\n var maxHeapSize = _emscripten_get_heap_max();\n if (requestedSize > maxHeapSize) {\n return false;\n }\n for (var cutDown = 1; cutDown <= 4; cutDown *= 2) {\n var overGrownHeapSize = oldSize * (1 + 0.2 / cutDown);\n overGrownHeapSize = Math.min(overGrownHeapSize, requestedSize + 100663296);\n var newSize = Math.min(maxHeapSize, alignUp(Math.max(requestedSize, overGrownHeapSize), 65536));\n var replacement = emscripten_realloc_buffer(newSize);\n if (replacement) {\n return true;\n }\n }\n return false;\n }\n var JSEvents = { inEventHandler: 0, removeAllEventListeners: function() {\n for (var i2 = JSEvents.eventHandlers.length - 1; i2 >= 0; --i2) {\n JSEvents._removeHandler(i2);\n }\n JSEvents.eventHandlers = [];\n JSEvents.deferredCalls = [];\n }, registerRemoveEventListeners: function() {\n if (!JSEvents.removeEventListenersRegistered) {\n __ATEXIT__.push(JSEvents.removeAllEventListeners);\n JSEvents.removeEventListenersRegistered = true;\n }\n }, deferredCalls: [], deferCall: function(targetFunction, precedence, argsList) {\n function arraysHaveEqualContent(arrA, arrB) {\n if (arrA.length != arrB.length)\n return false;\n for (var i3 in arrA) {\n if (arrA[i3] != arrB[i3])\n return false;\n }\n return true;\n }\n for (var i2 in JSEvents.deferredCalls) {\n var call = JSEvents.deferredCalls[i2];\n if (call.targetFunction == targetFunction && arraysHaveEqualContent(call.argsList, argsList)) {\n return;\n }\n }\n 
JSEvents.deferredCalls.push({ targetFunction, precedence, argsList });\n JSEvents.deferredCalls.sort(function(x, y) {\n return x.precedence < y.precedence;\n });\n }, removeDeferredCalls: function(targetFunction) {\n for (var i2 = 0; i2 < JSEvents.deferredCalls.length; ++i2) {\n if (JSEvents.deferredCalls[i2].targetFunction == targetFunction) {\n JSEvents.deferredCalls.splice(i2, 1);\n --i2;\n }\n }\n }, canPerformEventHandlerRequests: function() {\n return JSEvents.inEventHandler && JSEvents.currentEventHandler.allowsDeferredCalls;\n }, runDeferredCalls: function() {\n if (!JSEvents.canPerformEventHandlerRequests()) {\n return;\n }\n for (var i2 = 0; i2 < JSEvents.deferredCalls.length; ++i2) {\n var call = JSEvents.deferredCalls[i2];\n JSEvents.deferredCalls.splice(i2, 1);\n --i2;\n call.targetFunction.apply(null, call.argsList);\n }\n }, eventHandlers: [], removeAllHandlersOnTarget: function(target, eventTypeString) {\n for (var i2 = 0; i2 < JSEvents.eventHandlers.length; ++i2) {\n if (JSEvents.eventHandlers[i2].target == target && (!eventTypeString || eventTypeString == JSEvents.eventHandlers[i2].eventTypeString)) {\n JSEvents._removeHandler(i2--);\n }\n }\n }, _removeHandler: function(i2) {\n var h = JSEvents.eventHandlers[i2];\n h.target.removeEventListener(h.eventTypeString, h.eventListenerFunc, h.useCapture);\n JSEvents.eventHandlers.splice(i2, 1);\n }, registerOrRemoveHandler: function(eventHandler) {\n var jsEventHandler = function jsEventHandler2(event) {\n ++JSEvents.inEventHandler;\n JSEvents.currentEventHandler = eventHandler;\n JSEvents.runDeferredCalls();\n eventHandler.handlerFunc(event);\n JSEvents.runDeferredCalls();\n --JSEvents.inEventHandler;\n };\n if (eventHandler.callbackfunc) {\n eventHandler.eventListenerFunc = jsEventHandler;\n eventHandler.target.addEventListener(eventHandler.eventTypeString, jsEventHandler, eventHandler.useCapture);\n JSEvents.eventHandlers.push(eventHandler);\n JSEvents.registerRemoveEventListeners();\n } else {\n for (var i2 = 0; i2 < JSEvents.eventHandlers.length; ++i2) {\n if (JSEvents.eventHandlers[i2].target == eventHandler.target && JSEvents.eventHandlers[i2].eventTypeString == eventHandler.eventTypeString) {\n JSEvents._removeHandler(i2--);\n }\n }\n }\n }, queueEventHandlerOnThread_iiii: function(targetThread, eventHandlerFunc, eventTypeId, eventData, userData) {\n withStackSave(function() {\n var varargs = stackAlloc(12);\n GROWABLE_HEAP_I32()[varargs >> 2] = eventTypeId;\n GROWABLE_HEAP_I32()[varargs + 4 >> 2] = eventData;\n GROWABLE_HEAP_I32()[varargs + 8 >> 2] = userData;\n _emscripten_dispatch_to_thread_(targetThread, 637534208, eventHandlerFunc, eventData, varargs);\n });\n }, getTargetThreadForEventCallback: function(targetThread) {\n switch (targetThread) {\n case 1:\n return 0;\n case 2:\n return PThread.currentProxiedOperationCallerThread;\n default:\n return targetThread;\n }\n }, getNodeNameForTarget: function(target) {\n if (!target)\n return \"\";\n if (target == window)\n return \"#window\";\n if (target == screen)\n return \"#screen\";\n return target && target.nodeName ? 
target.nodeName : \"\";\n }, fullscreenEnabled: function() {\n return document.fullscreenEnabled || document.webkitFullscreenEnabled;\n } };\n function stringToNewUTF8(jsString) {\n var length = lengthBytesUTF8(jsString) + 1;\n var cString = _malloc(length);\n stringToUTF8(jsString, cString, length);\n return cString;\n }\n function _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread, targetCanvas, width, height) {\n withStackSave(function() {\n var varargs = stackAlloc(12);\n var targetCanvasPtr = 0;\n if (targetCanvas) {\n targetCanvasPtr = stringToNewUTF8(targetCanvas);\n }\n GROWABLE_HEAP_I32()[varargs >> 2] = targetCanvasPtr;\n GROWABLE_HEAP_I32()[varargs + 4 >> 2] = width;\n GROWABLE_HEAP_I32()[varargs + 8 >> 2] = height;\n _emscripten_dispatch_to_thread_(targetThread, 657457152, 0, targetCanvasPtr, varargs);\n });\n }\n function _emscripten_set_offscreencanvas_size_on_target_thread(targetThread, targetCanvas, width, height) {\n targetCanvas = targetCanvas ? UTF8ToString(targetCanvas) : \"\";\n _emscripten_set_offscreencanvas_size_on_target_thread_js(targetThread, targetCanvas, width, height);\n }\n function maybeCStringToJsString(cString) {\n return cString > 2 ? UTF8ToString(cString) : cString;\n }\n var specialHTMLTargets = [0, typeof document !== \"undefined\" ? document : 0, typeof window !== \"undefined\" ? window : 0];\n function findEventTarget(target) {\n target = maybeCStringToJsString(target);\n var domElement = specialHTMLTargets[target] || (typeof document !== \"undefined\" ? document.querySelector(target) : void 0);\n return domElement;\n }\n function findCanvasEventTarget(target) {\n return findEventTarget(target);\n }\n function _emscripten_set_canvas_element_size_calling_thread(target, width, height) {\n var canvas = findCanvasEventTarget(target);\n if (!canvas)\n return -4;\n if (canvas.canvasSharedPtr) {\n GROWABLE_HEAP_I32()[canvas.canvasSharedPtr >> 2] = width;\n GROWABLE_HEAP_I32()[canvas.canvasSharedPtr + 4 >> 2] = height;\n }\n if (canvas.offscreenCanvas || !canvas.controlTransferredOffscreen) {\n if (canvas.offscreenCanvas)\n canvas = canvas.offscreenCanvas;\n var autoResizeViewport = false;\n if (canvas.GLctxObject && canvas.GLctxObject.GLctx) {\n var prevViewport = canvas.GLctxObject.GLctx.getParameter(2978);\n autoResizeViewport = prevViewport[0] === 0 && prevViewport[1] === 0 && prevViewport[2] === canvas.width && prevViewport[3] === canvas.height;\n }\n canvas.width = width;\n canvas.height = height;\n if (autoResizeViewport) {\n canvas.GLctxObject.GLctx.viewport(0, 0, width, height);\n }\n } else if (canvas.canvasSharedPtr) {\n var targetThread = GROWABLE_HEAP_I32()[canvas.canvasSharedPtr + 8 >> 2];\n _emscripten_set_offscreencanvas_size_on_target_thread(targetThread, target, width, height);\n return 1;\n } else {\n return -4;\n }\n return 0;\n }\n function _emscripten_set_canvas_element_size_main_thread(target, width, height) {\n if (ENVIRONMENT_IS_PTHREAD)\n return _emscripten_proxy_to_main_thread_js(2, 1, target, width, height);\n return _emscripten_set_canvas_element_size_calling_thread(target, width, height);\n }\n function _emscripten_set_canvas_element_size(target, width, height) {\n var canvas = findCanvasEventTarget(target);\n if (canvas) {\n return _emscripten_set_canvas_element_size_calling_thread(target, width, height);\n } else {\n return _emscripten_set_canvas_element_size_main_thread(target, width, height);\n }\n }\n function _emscripten_unwind_to_js_event_loop() {\n throw \"unwind\";\n }\n function 
__webgl_enable_ANGLE_instanced_arrays(ctx) {\n var ext = ctx.getExtension(\"ANGLE_instanced_arrays\");\n if (ext) {\n ctx[\"vertexAttribDivisor\"] = function(index, divisor) {\n ext[\"vertexAttribDivisorANGLE\"](index, divisor);\n };\n ctx[\"drawArraysInstanced\"] = function(mode, first, count2, primcount) {\n ext[\"drawArraysInstancedANGLE\"](mode, first, count2, primcount);\n };\n ctx[\"drawElementsInstanced\"] = function(mode, count2, type, indices, primcount) {\n ext[\"drawElementsInstancedANGLE\"](mode, count2, type, indices, primcount);\n };\n return 1;\n }\n }\n function __webgl_enable_OES_vertex_array_object(ctx) {\n var ext = ctx.getExtension(\"OES_vertex_array_object\");\n if (ext) {\n ctx[\"createVertexArray\"] = function() {\n return ext[\"createVertexArrayOES\"]();\n };\n ctx[\"deleteVertexArray\"] = function(vao) {\n ext[\"deleteVertexArrayOES\"](vao);\n };\n ctx[\"bindVertexArray\"] = function(vao) {\n ext[\"bindVertexArrayOES\"](vao);\n };\n ctx[\"isVertexArray\"] = function(vao) {\n return ext[\"isVertexArrayOES\"](vao);\n };\n return 1;\n }\n }\n function __webgl_enable_WEBGL_draw_buffers(ctx) {\n var ext = ctx.getExtension(\"WEBGL_draw_buffers\");\n if (ext) {\n ctx[\"drawBuffers\"] = function(n2, bufs) {\n ext[\"drawBuffersWEBGL\"](n2, bufs);\n };\n return 1;\n }\n }\n function __webgl_enable_WEBGL_multi_draw(ctx) {\n return !!(ctx.multiDrawWebgl = ctx.getExtension(\"WEBGL_multi_draw\"));\n }\n var GL = { counter: 1, buffers: [], programs: [], framebuffers: [], renderbuffers: [], textures: [], shaders: [], vaos: [], contexts: {}, offscreenCanvases: {}, queries: [], stringCache: {}, unpackAlignment: 4, recordError: function recordError(errorCode) {\n if (!GL.lastError) {\n GL.lastError = errorCode;\n }\n }, getNewId: function(table) {\n var ret = GL.counter++;\n for (var i2 = table.length; i2 < ret; i2++) {\n table[i2] = null;\n }\n return ret;\n }, getSource: function(shader, count2, string2, length) {\n var source = \"\";\n for (var i2 = 0; i2 < count2; ++i2) {\n var len = length ? GROWABLE_HEAP_I32()[length + i2 * 4 >> 2] : -1;\n source += UTF8ToString(GROWABLE_HEAP_I32()[string2 + i2 * 4 >> 2], len < 0 ? void 0 : len);\n }\n return source;\n }, createContext: function(canvas, webGLContextAttributes) {\n if (!canvas.getContextSafariWebGL2Fixed) {\n canvas.getContextSafariWebGL2Fixed = canvas.getContext;\n canvas.getContext = function(ver, attrs) {\n var gl = canvas.getContextSafariWebGL2Fixed(ver, attrs);\n return ver == \"webgl\" == gl instanceof WebGLRenderingContext ? 
gl : null;\n };\n }\n var ctx = canvas.getContext(\"webgl\", webGLContextAttributes);\n if (!ctx)\n return 0;\n var handle = GL.registerContext(ctx, webGLContextAttributes);\n return handle;\n }, registerContext: function(ctx, webGLContextAttributes) {\n var handle = _malloc(8);\n GROWABLE_HEAP_I32()[handle + 4 >> 2] = _pthread_self();\n var context = { handle, attributes: webGLContextAttributes, version: webGLContextAttributes.majorVersion, GLctx: ctx };\n if (ctx.canvas)\n ctx.canvas.GLctxObject = context;\n GL.contexts[handle] = context;\n if (typeof webGLContextAttributes.enableExtensionsByDefault === \"undefined\" || webGLContextAttributes.enableExtensionsByDefault) {\n GL.initExtensions(context);\n }\n return handle;\n }, makeContextCurrent: function(contextHandle) {\n GL.currentContext = GL.contexts[contextHandle];\n Module.ctx = GLctx = GL.currentContext && GL.currentContext.GLctx;\n return !(contextHandle && !GLctx);\n }, getContext: function(contextHandle) {\n return GL.contexts[contextHandle];\n }, deleteContext: function(contextHandle) {\n if (GL.currentContext === GL.contexts[contextHandle])\n GL.currentContext = null;\n if (typeof JSEvents === \"object\")\n JSEvents.removeAllHandlersOnTarget(GL.contexts[contextHandle].GLctx.canvas);\n if (GL.contexts[contextHandle] && GL.contexts[contextHandle].GLctx.canvas)\n GL.contexts[contextHandle].GLctx.canvas.GLctxObject = void 0;\n _free(GL.contexts[contextHandle].handle);\n GL.contexts[contextHandle] = null;\n }, initExtensions: function(context) {\n if (!context)\n context = GL.currentContext;\n if (context.initExtensionsDone)\n return;\n context.initExtensionsDone = true;\n var GLctx2 = context.GLctx;\n __webgl_enable_ANGLE_instanced_arrays(GLctx2);\n __webgl_enable_OES_vertex_array_object(GLctx2);\n __webgl_enable_WEBGL_draw_buffers(GLctx2);\n {\n GLctx2.disjointTimerQueryExt = GLctx2.getExtension(\"EXT_disjoint_timer_query\");\n }\n __webgl_enable_WEBGL_multi_draw(GLctx2);\n var exts = GLctx2.getSupportedExtensions() || [];\n exts.forEach(function(ext) {\n if (!ext.includes(\"lose_context\") && !ext.includes(\"debug\")) {\n GLctx2.getExtension(ext);\n }\n });\n } };\n var __emscripten_webgl_power_preferences = [\"default\", \"low-power\", \"high-performance\"];\n function _emscripten_webgl_do_create_context(target, attributes) {\n var a = attributes >> 2;\n var powerPreference = GROWABLE_HEAP_I32()[a + (24 >> 2)];\n var contextAttributes = { \"alpha\": !!GROWABLE_HEAP_I32()[a + (0 >> 2)], \"depth\": !!GROWABLE_HEAP_I32()[a + (4 >> 2)], \"stencil\": !!GROWABLE_HEAP_I32()[a + (8 >> 2)], \"antialias\": !!GROWABLE_HEAP_I32()[a + (12 >> 2)], \"premultipliedAlpha\": !!GROWABLE_HEAP_I32()[a + (16 >> 2)], \"preserveDrawingBuffer\": !!GROWABLE_HEAP_I32()[a + (20 >> 2)], \"powerPreference\": __emscripten_webgl_power_preferences[powerPreference], \"failIfMajorPerformanceCaveat\": !!GROWABLE_HEAP_I32()[a + (28 >> 2)], majorVersion: GROWABLE_HEAP_I32()[a + (32 >> 2)], minorVersion: GROWABLE_HEAP_I32()[a + (36 >> 2)], enableExtensionsByDefault: GROWABLE_HEAP_I32()[a + (40 >> 2)], explicitSwapControl: GROWABLE_HEAP_I32()[a + (44 >> 2)], proxyContextToMainThread: GROWABLE_HEAP_I32()[a + (48 >> 2)], renderViaOffscreenBackBuffer: GROWABLE_HEAP_I32()[a + (52 >> 2)] };\n var canvas = findCanvasEventTarget(target);\n if (!canvas) {\n return 0;\n }\n if (contextAttributes.explicitSwapControl) {\n return 0;\n }\n var contextHandle = GL.createContext(canvas, contextAttributes);\n return contextHandle;\n }\n function _emscripten_webgl_create_context(a0, 
a12) {\n return _emscripten_webgl_do_create_context(a0, a12);\n }\n var SYSCALLS = { mappings: {}, buffers: [null, [], []], printChar: function(stream, curr) {\n var buffer3 = SYSCALLS.buffers[stream];\n if (curr === 0 || curr === 10) {\n (stream === 1 ? out : err)(UTF8ArrayToString(buffer3, 0));\n buffer3.length = 0;\n } else {\n buffer3.push(curr);\n }\n }, varargs: void 0, get: function() {\n SYSCALLS.varargs += 4;\n var ret = GROWABLE_HEAP_I32()[SYSCALLS.varargs - 4 >> 2];\n return ret;\n }, getStr: function(ptr) {\n var ret = UTF8ToString(ptr);\n return ret;\n }, get64: function(low, high) {\n return low;\n } };\n function _fd_close(fd) {\n if (ENVIRONMENT_IS_PTHREAD)\n return _emscripten_proxy_to_main_thread_js(3, 1, fd);\n return 0;\n }\n function _fd_seek(fd, offset_low, offset_high, whence, newOffset) {\n if (ENVIRONMENT_IS_PTHREAD)\n return _emscripten_proxy_to_main_thread_js(4, 1, fd, offset_low, offset_high, whence, newOffset);\n }\n function _fd_write(fd, iov, iovcnt, pnum) {\n if (ENVIRONMENT_IS_PTHREAD)\n return _emscripten_proxy_to_main_thread_js(5, 1, fd, iov, iovcnt, pnum);\n var num = 0;\n for (var i2 = 0; i2 < iovcnt; i2++) {\n var ptr = GROWABLE_HEAP_I32()[iov >> 2];\n var len = GROWABLE_HEAP_I32()[iov + 4 >> 2];\n iov += 8;\n for (var j = 0; j < len; j++) {\n SYSCALLS.printChar(fd, GROWABLE_HEAP_U8()[ptr + j]);\n }\n num += len;\n }\n GROWABLE_HEAP_I32()[pnum >> 2] = num;\n return 0;\n }\n function _setTempRet0(val) {\n setTempRet0(val);\n }\n PThread.init();\n var GLctx;\n var proxiedFunctionTable = [null, exitOnMainThread, _emscripten_set_canvas_element_size_main_thread, _fd_close, _fd_seek, _fd_write];\n var ASSERTIONS = false;\n var asmLibraryArg = { \"__clock_gettime\": ___clock_gettime, \"__emscripten_init_main_thread_js\": ___emscripten_init_main_thread_js, \"__emscripten_thread_cleanup\": ___emscripten_thread_cleanup, \"__pthread_create_js\": ___pthread_create_js, \"_emscripten_default_pthread_stack_size\": __emscripten_default_pthread_stack_size, \"_emscripten_notify_thread_queue\": __emscripten_notify_thread_queue, \"abort\": _abort, \"emscripten_check_blocking_allowed\": _emscripten_check_blocking_allowed, \"emscripten_get_heap_max\": _emscripten_get_heap_max, \"emscripten_get_now\": _emscripten_get_now, \"emscripten_memcpy_big\": _emscripten_memcpy_big, \"emscripten_num_logical_cores\": _emscripten_num_logical_cores, \"emscripten_receive_on_main_thread_js\": _emscripten_receive_on_main_thread_js, \"emscripten_resize_heap\": _emscripten_resize_heap, \"emscripten_set_canvas_element_size\": _emscripten_set_canvas_element_size, \"emscripten_unwind_to_js_event_loop\": _emscripten_unwind_to_js_event_loop, \"emscripten_webgl_create_context\": _emscripten_webgl_create_context, \"exit\": _exit, \"fd_close\": _fd_close, \"fd_seek\": _fd_seek, \"fd_write\": _fd_write, \"memory\": wasmMemory || Module[\"wasmMemory\"], \"setTempRet0\": _setTempRet0 };\n var asm = createWasm();\n var ___wasm_call_ctors = Module[\"___wasm_call_ctors\"] = function() {\n return (___wasm_call_ctors = Module[\"___wasm_call_ctors\"] = Module[\"asm\"][\"__wasm_call_ctors\"]).apply(null, arguments);\n };\n var _init = Module[\"_init\"] = function() {\n return (_init = Module[\"_init\"] = Module[\"asm\"][\"init\"]).apply(null, arguments);\n };\n var _init_with_threads_count = Module[\"_init_with_threads_count\"] = function() {\n return (_init_with_threads_count = Module[\"_init_with_threads_count\"] = Module[\"asm\"][\"init_with_threads_count\"]).apply(null, arguments);\n };\n var 
_get_threads_count = Module[\"_get_threads_count\"] = function() {\n return (_get_threads_count = Module[\"_get_threads_count\"] = Module[\"asm\"][\"get_threads_count\"]).apply(null, arguments);\n };\n var _register_tensor = Module[\"_register_tensor\"] = function() {\n return (_register_tensor = Module[\"_register_tensor\"] = Module[\"asm\"][\"register_tensor\"]).apply(null, arguments);\n };\n var _dispose_data = Module[\"_dispose_data\"] = function() {\n return (_dispose_data = Module[\"_dispose_data\"] = Module[\"asm\"][\"dispose_data\"]).apply(null, arguments);\n };\n var _dispose = Module[\"_dispose\"] = function() {\n return (_dispose = Module[\"_dispose\"] = Module[\"asm\"][\"dispose\"]).apply(null, arguments);\n };\n var _Abs = Module[\"_Abs\"] = function() {\n return (_Abs = Module[\"_Abs\"] = Module[\"asm\"][\"Abs\"]).apply(null, arguments);\n };\n var _Add = Module[\"_Add\"] = function() {\n return (_Add = Module[\"_Add\"] = Module[\"asm\"][\"Add\"]).apply(null, arguments);\n };\n var _AddN = Module[\"_AddN\"] = function() {\n return (_AddN = Module[\"_AddN\"] = Module[\"asm\"][\"AddN\"]).apply(null, arguments);\n };\n var _All = Module[\"_All\"] = function() {\n return (_All = Module[\"_All\"] = Module[\"asm\"][\"All\"]).apply(null, arguments);\n };\n var _Any = Module[\"_Any\"] = function() {\n return (_Any = Module[\"_Any\"] = Module[\"asm\"][\"Any\"]).apply(null, arguments);\n };\n var _ArgMax = Module[\"_ArgMax\"] = function() {\n return (_ArgMax = Module[\"_ArgMax\"] = Module[\"asm\"][\"ArgMax\"]).apply(null, arguments);\n };\n var _AvgPool = Module[\"_AvgPool\"] = function() {\n return (_AvgPool = Module[\"_AvgPool\"] = Module[\"asm\"][\"AvgPool\"]).apply(null, arguments);\n };\n var _BatchMatMul = Module[\"_BatchMatMul\"] = function() {\n return (_BatchMatMul = Module[\"_BatchMatMul\"] = Module[\"asm\"][\"BatchMatMul\"]).apply(null, arguments);\n };\n var _Ceil = Module[\"_Ceil\"] = function() {\n return (_Ceil = Module[\"_Ceil\"] = Module[\"asm\"][\"Ceil\"]).apply(null, arguments);\n };\n var _ClipByValue = Module[\"_ClipByValue\"] = function() {\n return (_ClipByValue = Module[\"_ClipByValue\"] = Module[\"asm\"][\"ClipByValue\"]).apply(null, arguments);\n };\n var _Conv2D = Module[\"_Conv2D\"] = function() {\n return (_Conv2D = Module[\"_Conv2D\"] = Module[\"asm\"][\"Conv2D\"]).apply(null, arguments);\n };\n var _Conv2DBackpropInput = Module[\"_Conv2DBackpropInput\"] = function() {\n return (_Conv2DBackpropInput = Module[\"_Conv2DBackpropInput\"] = Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null, arguments);\n };\n var _Cos = Module[\"_Cos\"] = function() {\n return (_Cos = Module[\"_Cos\"] = Module[\"asm\"][\"Cos\"]).apply(null, arguments);\n };\n var _Cosh = Module[\"_Cosh\"] = function() {\n return (_Cosh = Module[\"_Cosh\"] = Module[\"asm\"][\"Cosh\"]).apply(null, arguments);\n };\n var _CropAndResize = Module[\"_CropAndResize\"] = function() {\n return (_CropAndResize = Module[\"_CropAndResize\"] = Module[\"asm\"][\"CropAndResize\"]).apply(null, arguments);\n };\n var _Cumprod = Module[\"_Cumprod\"] = function() {\n return (_Cumprod = Module[\"_Cumprod\"] = Module[\"asm\"][\"Cumprod\"]).apply(null, arguments);\n };\n var _Cumsum = Module[\"_Cumsum\"] = function() {\n return (_Cumsum = Module[\"_Cumsum\"] = Module[\"asm\"][\"Cumsum\"]).apply(null, arguments);\n };\n var _DepthToSpace = Module[\"_DepthToSpace\"] = function() {\n return (_DepthToSpace = Module[\"_DepthToSpace\"] = Module[\"asm\"][\"DepthToSpace\"]).apply(null, arguments);\n };\n var 
_DepthwiseConv2dNative = Module[\"_DepthwiseConv2dNative\"] = function() {\n return (_DepthwiseConv2dNative = Module[\"_DepthwiseConv2dNative\"] = Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null, arguments);\n };\n var _Elu = Module[\"_Elu\"] = function() {\n return (_Elu = Module[\"_Elu\"] = Module[\"asm\"][\"Elu\"]).apply(null, arguments);\n };\n var _Equal = Module[\"_Equal\"] = function() {\n return (_Equal = Module[\"_Equal\"] = Module[\"asm\"][\"Equal\"]).apply(null, arguments);\n };\n var _Exp = Module[\"_Exp\"] = function() {\n return (_Exp = Module[\"_Exp\"] = Module[\"asm\"][\"Exp\"]).apply(null, arguments);\n };\n var _FlipLeftRight = Module[\"_FlipLeftRight\"] = function() {\n return (_FlipLeftRight = Module[\"_FlipLeftRight\"] = Module[\"asm\"][\"FlipLeftRight\"]).apply(null, arguments);\n };\n var _Floor = Module[\"_Floor\"] = function() {\n return (_Floor = Module[\"_Floor\"] = Module[\"asm\"][\"Floor\"]).apply(null, arguments);\n };\n var _FloorDiv = Module[\"_FloorDiv\"] = function() {\n return (_FloorDiv = Module[\"_FloorDiv\"] = Module[\"asm\"][\"FloorDiv\"]).apply(null, arguments);\n };\n var _FusedBatchNorm = Module[\"_FusedBatchNorm\"] = function() {\n return (_FusedBatchNorm = Module[\"_FusedBatchNorm\"] = Module[\"asm\"][\"FusedBatchNorm\"]).apply(null, arguments);\n };\n var _FusedConv2D = Module[\"_FusedConv2D\"] = function() {\n return (_FusedConv2D = Module[\"_FusedConv2D\"] = Module[\"asm\"][\"FusedConv2D\"]).apply(null, arguments);\n };\n var _FusedDepthwiseConv2D = Module[\"_FusedDepthwiseConv2D\"] = function() {\n return (_FusedDepthwiseConv2D = Module[\"_FusedDepthwiseConv2D\"] = Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null, arguments);\n };\n var _Gather = Module[\"_Gather\"] = function() {\n return (_Gather = Module[\"_Gather\"] = Module[\"asm\"][\"Gather\"]).apply(null, arguments);\n };\n var _GatherNd = Module[\"_GatherNd\"] = function() {\n return (_GatherNd = Module[\"_GatherNd\"] = Module[\"asm\"][\"GatherNd\"]).apply(null, arguments);\n };\n var _Greater = Module[\"_Greater\"] = function() {\n return (_Greater = Module[\"_Greater\"] = Module[\"asm\"][\"Greater\"]).apply(null, arguments);\n };\n var _GreaterEqual = Module[\"_GreaterEqual\"] = function() {\n return (_GreaterEqual = Module[\"_GreaterEqual\"] = Module[\"asm\"][\"GreaterEqual\"]).apply(null, arguments);\n };\n var _LeakyRelu = Module[\"_LeakyRelu\"] = function() {\n return (_LeakyRelu = Module[\"_LeakyRelu\"] = Module[\"asm\"][\"LeakyRelu\"]).apply(null, arguments);\n };\n var _Less = Module[\"_Less\"] = function() {\n return (_Less = Module[\"_Less\"] = Module[\"asm\"][\"Less\"]).apply(null, arguments);\n };\n var _LessEqual = Module[\"_LessEqual\"] = function() {\n return (_LessEqual = Module[\"_LessEqual\"] = Module[\"asm\"][\"LessEqual\"]).apply(null, arguments);\n };\n var _Log = Module[\"_Log\"] = function() {\n return (_Log = Module[\"_Log\"] = Module[\"asm\"][\"Log\"]).apply(null, arguments);\n };\n var _LogicalAnd = Module[\"_LogicalAnd\"] = function() {\n return (_LogicalAnd = Module[\"_LogicalAnd\"] = Module[\"asm\"][\"LogicalAnd\"]).apply(null, arguments);\n };\n var _LogicalNot = Module[\"_LogicalNot\"] = function() {\n return (_LogicalNot = Module[\"_LogicalNot\"] = Module[\"asm\"][\"LogicalNot\"]).apply(null, arguments);\n };\n var _LogicalOr = Module[\"_LogicalOr\"] = function() {\n return (_LogicalOr = Module[\"_LogicalOr\"] = Module[\"asm\"][\"LogicalOr\"]).apply(null, arguments);\n };\n var _LogicalXor = Module[\"_LogicalXor\"] = function() {\n return 
(_LogicalXor = Module[\"_LogicalXor\"] = Module[\"asm\"][\"LogicalXor\"]).apply(null, arguments);\n };\n var _Max = Module[\"_Max\"] = function() {\n return (_Max = Module[\"_Max\"] = Module[\"asm\"][\"Max\"]).apply(null, arguments);\n };\n var _MaxPool = Module[\"_MaxPool\"] = function() {\n return (_MaxPool = Module[\"_MaxPool\"] = Module[\"asm\"][\"MaxPool\"]).apply(null, arguments);\n };\n var _Maximum = Module[\"_Maximum\"] = function() {\n return (_Maximum = Module[\"_Maximum\"] = Module[\"asm\"][\"Maximum\"]).apply(null, arguments);\n };\n var _Mean = Module[\"_Mean\"] = function() {\n return (_Mean = Module[\"_Mean\"] = Module[\"asm\"][\"Mean\"]).apply(null, arguments);\n };\n var _Min = Module[\"_Min\"] = function() {\n return (_Min = Module[\"_Min\"] = Module[\"asm\"][\"Min\"]).apply(null, arguments);\n };\n var _Minimum = Module[\"_Minimum\"] = function() {\n return (_Minimum = Module[\"_Minimum\"] = Module[\"asm\"][\"Minimum\"]).apply(null, arguments);\n };\n var _MirrorPad = Module[\"_MirrorPad\"] = function() {\n return (_MirrorPad = Module[\"_MirrorPad\"] = Module[\"asm\"][\"MirrorPad\"]).apply(null, arguments);\n };\n var _Multiply = Module[\"_Multiply\"] = function() {\n return (_Multiply = Module[\"_Multiply\"] = Module[\"asm\"][\"Multiply\"]).apply(null, arguments);\n };\n var _Neg = Module[\"_Neg\"] = function() {\n return (_Neg = Module[\"_Neg\"] = Module[\"asm\"][\"Neg\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV3 = Module[\"_NonMaxSuppressionV3\"] = function() {\n return (_NonMaxSuppressionV3 = Module[\"_NonMaxSuppressionV3\"] = Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV4 = Module[\"_NonMaxSuppressionV4\"] = function() {\n return (_NonMaxSuppressionV4 = Module[\"_NonMaxSuppressionV4\"] = Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV5 = Module[\"_NonMaxSuppressionV5\"] = function() {\n return (_NonMaxSuppressionV5 = Module[\"_NonMaxSuppressionV5\"] = Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null, arguments);\n };\n var _NotEqual = Module[\"_NotEqual\"] = function() {\n return (_NotEqual = Module[\"_NotEqual\"] = Module[\"asm\"][\"NotEqual\"]).apply(null, arguments);\n };\n var _OneHot = Module[\"_OneHot\"] = function() {\n return (_OneHot = Module[\"_OneHot\"] = Module[\"asm\"][\"OneHot\"]).apply(null, arguments);\n };\n var _PadV2 = Module[\"_PadV2\"] = function() {\n return (_PadV2 = Module[\"_PadV2\"] = Module[\"asm\"][\"PadV2\"]).apply(null, arguments);\n };\n var _Pow = Module[\"_Pow\"] = function() {\n return (_Pow = Module[\"_Pow\"] = Module[\"asm\"][\"Pow\"]).apply(null, arguments);\n };\n var _Prelu = Module[\"_Prelu\"] = function() {\n return (_Prelu = Module[\"_Prelu\"] = Module[\"asm\"][\"Prelu\"]).apply(null, arguments);\n };\n var _Prod = Module[\"_Prod\"] = function() {\n return (_Prod = Module[\"_Prod\"] = Module[\"asm\"][\"Prod\"]).apply(null, arguments);\n };\n var _RealDiv = Module[\"_RealDiv\"] = function() {\n return (_RealDiv = Module[\"_RealDiv\"] = Module[\"asm\"][\"RealDiv\"]).apply(null, arguments);\n };\n var _Relu = Module[\"_Relu\"] = function() {\n return (_Relu = Module[\"_Relu\"] = Module[\"asm\"][\"Relu\"]).apply(null, arguments);\n };\n var _Relu6 = Module[\"_Relu6\"] = function() {\n return (_Relu6 = Module[\"_Relu6\"] = Module[\"asm\"][\"Relu6\"]).apply(null, arguments);\n };\n var _ResizeBilinear = Module[\"_ResizeBilinear\"] = function() {\n return (_ResizeBilinear = Module[\"_ResizeBilinear\"] = 
Module[\"asm\"][\"ResizeBilinear\"]).apply(null, arguments);\n };\n var _ResizeNearestNeighbor = Module[\"_ResizeNearestNeighbor\"] = function() {\n return (_ResizeNearestNeighbor = Module[\"_ResizeNearestNeighbor\"] = Module[\"asm\"][\"ResizeNearestNeighbor\"]).apply(null, arguments);\n };\n var _Reverse = Module[\"_Reverse\"] = function() {\n return (_Reverse = Module[\"_Reverse\"] = Module[\"asm\"][\"Reverse\"]).apply(null, arguments);\n };\n var _RotateWithOffset = Module[\"_RotateWithOffset\"] = function() {\n return (_RotateWithOffset = Module[\"_RotateWithOffset\"] = Module[\"asm\"][\"RotateWithOffset\"]).apply(null, arguments);\n };\n var _Round = Module[\"_Round\"] = function() {\n return (_Round = Module[\"_Round\"] = Module[\"asm\"][\"Round\"]).apply(null, arguments);\n };\n var _Rsqrt = Module[\"_Rsqrt\"] = function() {\n return (_Rsqrt = Module[\"_Rsqrt\"] = Module[\"asm\"][\"Rsqrt\"]).apply(null, arguments);\n };\n var _ScatterNd = Module[\"_ScatterNd\"] = function() {\n return (_ScatterNd = Module[\"_ScatterNd\"] = Module[\"asm\"][\"ScatterNd\"]).apply(null, arguments);\n };\n var _SelectV2 = Module[\"_SelectV2\"] = function() {\n return (_SelectV2 = Module[\"_SelectV2\"] = Module[\"asm\"][\"SelectV2\"]).apply(null, arguments);\n };\n var _Sigmoid = Module[\"_Sigmoid\"] = function() {\n return (_Sigmoid = Module[\"_Sigmoid\"] = Module[\"asm\"][\"Sigmoid\"]).apply(null, arguments);\n };\n var _Sin = Module[\"_Sin\"] = function() {\n return (_Sin = Module[\"_Sin\"] = Module[\"asm\"][\"Sin\"]).apply(null, arguments);\n };\n var _Softmax = Module[\"_Softmax\"] = function() {\n return (_Softmax = Module[\"_Softmax\"] = Module[\"asm\"][\"Softmax\"]).apply(null, arguments);\n };\n var _SparseFillEmptyRows = Module[\"_SparseFillEmptyRows\"] = function() {\n return (_SparseFillEmptyRows = Module[\"_SparseFillEmptyRows\"] = Module[\"asm\"][\"SparseFillEmptyRows\"]).apply(null, arguments);\n };\n var _SparseReshape = Module[\"_SparseReshape\"] = function() {\n return (_SparseReshape = Module[\"_SparseReshape\"] = Module[\"asm\"][\"SparseReshape\"]).apply(null, arguments);\n };\n var _SparseSegmentReduction = Module[\"_SparseSegmentReduction\"] = function() {\n return (_SparseSegmentReduction = Module[\"_SparseSegmentReduction\"] = Module[\"asm\"][\"SparseSegmentReduction\"]).apply(null, arguments);\n };\n var _Sqrt = Module[\"_Sqrt\"] = function() {\n return (_Sqrt = Module[\"_Sqrt\"] = Module[\"asm\"][\"Sqrt\"]).apply(null, arguments);\n };\n var _Square = Module[\"_Square\"] = function() {\n return (_Square = Module[\"_Square\"] = Module[\"asm\"][\"Square\"]).apply(null, arguments);\n };\n var _SquaredDifference = Module[\"_SquaredDifference\"] = function() {\n return (_SquaredDifference = Module[\"_SquaredDifference\"] = Module[\"asm\"][\"SquaredDifference\"]).apply(null, arguments);\n };\n var _Step = Module[\"_Step\"] = function() {\n return (_Step = Module[\"_Step\"] = Module[\"asm\"][\"Step\"]).apply(null, arguments);\n };\n var _StridedSlice = Module[\"_StridedSlice\"] = function() {\n return (_StridedSlice = Module[\"_StridedSlice\"] = Module[\"asm\"][\"StridedSlice\"]).apply(null, arguments);\n };\n var _Sub = Module[\"_Sub\"] = function() {\n return (_Sub = Module[\"_Sub\"] = Module[\"asm\"][\"Sub\"]).apply(null, arguments);\n };\n var _Sum = Module[\"_Sum\"] = function() {\n return (_Sum = Module[\"_Sum\"] = Module[\"asm\"][\"Sum\"]).apply(null, arguments);\n };\n var _Tan = Module[\"_Tan\"] = function() {\n return (_Tan = Module[\"_Tan\"] = 
Module[\"asm\"][\"Tan\"]).apply(null, arguments);\n };\n var _Tanh = Module[\"_Tanh\"] = function() {\n return (_Tanh = Module[\"_Tanh\"] = Module[\"asm\"][\"Tanh\"]).apply(null, arguments);\n };\n var _Tile = Module[\"_Tile\"] = function() {\n return (_Tile = Module[\"_Tile\"] = Module[\"asm\"][\"Tile\"]).apply(null, arguments);\n };\n var _TopK = Module[\"_TopK\"] = function() {\n return (_TopK = Module[\"_TopK\"] = Module[\"asm\"][\"TopK\"]).apply(null, arguments);\n };\n var _Transform = Module[\"_Transform\"] = function() {\n return (_Transform = Module[\"_Transform\"] = Module[\"asm\"][\"Transform\"]).apply(null, arguments);\n };\n var _Transpose = Module[\"_Transpose\"] = function() {\n return (_Transpose = Module[\"_Transpose\"] = Module[\"asm\"][\"Transpose\"]).apply(null, arguments);\n };\n var __FusedMatMul = Module[\"__FusedMatMul\"] = function() {\n return (__FusedMatMul = Module[\"__FusedMatMul\"] = Module[\"asm\"][\"_FusedMatMul\"]).apply(null, arguments);\n };\n var _malloc = Module[\"_malloc\"] = function() {\n return (_malloc = Module[\"_malloc\"] = Module[\"asm\"][\"malloc\"]).apply(null, arguments);\n };\n var _free = Module[\"_free\"] = function() {\n return (_free = Module[\"_free\"] = Module[\"asm\"][\"free\"]).apply(null, arguments);\n };\n var _emscripten_tls_init = Module[\"_emscripten_tls_init\"] = function() {\n return (_emscripten_tls_init = Module[\"_emscripten_tls_init\"] = Module[\"asm\"][\"emscripten_tls_init\"]).apply(null, arguments);\n };\n var ___errno_location = Module[\"___errno_location\"] = function() {\n return (___errno_location = Module[\"___errno_location\"] = Module[\"asm\"][\"__errno_location\"]).apply(null, arguments);\n };\n var _pthread_self = Module[\"_pthread_self\"] = function() {\n return (_pthread_self = Module[\"_pthread_self\"] = Module[\"asm\"][\"pthread_self\"]).apply(null, arguments);\n };\n var _emscripten_main_thread_process_queued_calls = Module[\"_emscripten_main_thread_process_queued_calls\"] = function() {\n return (_emscripten_main_thread_process_queued_calls = Module[\"_emscripten_main_thread_process_queued_calls\"] = Module[\"asm\"][\"emscripten_main_thread_process_queued_calls\"]).apply(null, arguments);\n };\n var __emscripten_thread_crashed = Module[\"__emscripten_thread_crashed\"] = function() {\n return (__emscripten_thread_crashed = Module[\"__emscripten_thread_crashed\"] = Module[\"asm\"][\"_emscripten_thread_crashed\"]).apply(null, arguments);\n };\n var __emscripten_thread_init = Module[\"__emscripten_thread_init\"] = function() {\n return (__emscripten_thread_init = Module[\"__emscripten_thread_init\"] = Module[\"asm\"][\"_emscripten_thread_init\"]).apply(null, arguments);\n };\n var _emscripten_current_thread_process_queued_calls = Module[\"_emscripten_current_thread_process_queued_calls\"] = function() {\n return (_emscripten_current_thread_process_queued_calls = Module[\"_emscripten_current_thread_process_queued_calls\"] = Module[\"asm\"][\"emscripten_current_thread_process_queued_calls\"]).apply(null, arguments);\n };\n var _emscripten_main_browser_thread_id = Module[\"_emscripten_main_browser_thread_id\"] = function() {\n return (_emscripten_main_browser_thread_id = Module[\"_emscripten_main_browser_thread_id\"] = Module[\"asm\"][\"emscripten_main_browser_thread_id\"]).apply(null, arguments);\n };\n var _emscripten_sync_run_in_main_thread_2 = Module[\"_emscripten_sync_run_in_main_thread_2\"] = function() {\n return (_emscripten_sync_run_in_main_thread_2 = Module[\"_emscripten_sync_run_in_main_thread_2\"] = 
Module[\"asm\"][\"emscripten_sync_run_in_main_thread_2\"]).apply(null, arguments);\n };\n var _emscripten_sync_run_in_main_thread_4 = Module[\"_emscripten_sync_run_in_main_thread_4\"] = function() {\n return (_emscripten_sync_run_in_main_thread_4 = Module[\"_emscripten_sync_run_in_main_thread_4\"] = Module[\"asm\"][\"emscripten_sync_run_in_main_thread_4\"]).apply(null, arguments);\n };\n var _emscripten_run_in_main_runtime_thread_js = Module[\"_emscripten_run_in_main_runtime_thread_js\"] = function() {\n return (_emscripten_run_in_main_runtime_thread_js = Module[\"_emscripten_run_in_main_runtime_thread_js\"] = Module[\"asm\"][\"emscripten_run_in_main_runtime_thread_js\"]).apply(null, arguments);\n };\n var _emscripten_dispatch_to_thread_ = Module[\"_emscripten_dispatch_to_thread_\"] = function() {\n return (_emscripten_dispatch_to_thread_ = Module[\"_emscripten_dispatch_to_thread_\"] = Module[\"asm\"][\"emscripten_dispatch_to_thread_\"]).apply(null, arguments);\n };\n var __emscripten_thread_free_data = Module[\"__emscripten_thread_free_data\"] = function() {\n return (__emscripten_thread_free_data = Module[\"__emscripten_thread_free_data\"] = Module[\"asm\"][\"_emscripten_thread_free_data\"]).apply(null, arguments);\n };\n var __emscripten_thread_exit = Module[\"__emscripten_thread_exit\"] = function() {\n return (__emscripten_thread_exit = Module[\"__emscripten_thread_exit\"] = Module[\"asm\"][\"_emscripten_thread_exit\"]).apply(null, arguments);\n };\n var _memalign = Module[\"_memalign\"] = function() {\n return (_memalign = Module[\"_memalign\"] = Module[\"asm\"][\"memalign\"]).apply(null, arguments);\n };\n var _emscripten_stack_set_limits = Module[\"_emscripten_stack_set_limits\"] = function() {\n return (_emscripten_stack_set_limits = Module[\"_emscripten_stack_set_limits\"] = Module[\"asm\"][\"emscripten_stack_set_limits\"]).apply(null, arguments);\n };\n var stackSave = Module[\"stackSave\"] = function() {\n return (stackSave = Module[\"stackSave\"] = Module[\"asm\"][\"stackSave\"]).apply(null, arguments);\n };\n var stackRestore = Module[\"stackRestore\"] = function() {\n return (stackRestore = Module[\"stackRestore\"] = Module[\"asm\"][\"stackRestore\"]).apply(null, arguments);\n };\n var stackAlloc = Module[\"stackAlloc\"] = function() {\n return (stackAlloc = Module[\"stackAlloc\"] = Module[\"asm\"][\"stackAlloc\"]).apply(null, arguments);\n };\n var dynCall_iijjiiii = Module[\"dynCall_iijjiiii\"] = function() {\n return (dynCall_iijjiiii = Module[\"dynCall_iijjiiii\"] = Module[\"asm\"][\"dynCall_iijjiiii\"]).apply(null, arguments);\n };\n var dynCall_jiji = Module[\"dynCall_jiji\"] = function() {\n return (dynCall_jiji = Module[\"dynCall_jiji\"] = Module[\"asm\"][\"dynCall_jiji\"]).apply(null, arguments);\n };\n var __emscripten_allow_main_runtime_queued_calls = Module[\"__emscripten_allow_main_runtime_queued_calls\"] = 21672;\n Module[\"cwrap\"] = cwrap;\n Module[\"keepRuntimeAlive\"] = keepRuntimeAlive;\n Module[\"PThread\"] = PThread;\n Module[\"PThread\"] = PThread;\n Module[\"wasmMemory\"] = wasmMemory;\n Module[\"ExitStatus\"] = ExitStatus;\n var calledRun;\n function ExitStatus(status) {\n this.name = \"ExitStatus\";\n this.message = \"Program terminated with exit(\" + status + \")\";\n this.status = status;\n }\n dependenciesFulfilled = function runCaller() {\n if (!calledRun)\n run();\n if (!calledRun)\n dependenciesFulfilled = runCaller;\n };\n function run(args) {\n args = args || arguments_;\n if (runDependencies > 0) {\n return;\n }\n if (ENVIRONMENT_IS_PTHREAD) 
{\n readyPromiseResolve(Module);\n initRuntime();\n postMessage({ \"cmd\": \"loaded\" });\n return;\n }\n preRun();\n if (runDependencies > 0) {\n return;\n }\n function doRun() {\n if (calledRun)\n return;\n calledRun = true;\n Module[\"calledRun\"] = true;\n if (ABORT)\n return;\n initRuntime();\n readyPromiseResolve(Module);\n if (Module[\"onRuntimeInitialized\"])\n Module[\"onRuntimeInitialized\"]();\n postRun();\n }\n if (Module[\"setStatus\"]) {\n Module[\"setStatus\"](\"Running...\");\n setTimeout(function() {\n setTimeout(function() {\n Module[\"setStatus\"](\"\");\n }, 1);\n doRun();\n }, 1);\n } else {\n doRun();\n }\n }\n Module[\"run\"] = run;\n function exit(status, implicit) {\n EXITSTATUS = status;\n if (!implicit) {\n if (ENVIRONMENT_IS_PTHREAD) {\n exitOnMainThread(status);\n throw \"unwind\";\n } else {\n }\n }\n if (keepRuntimeAlive()) {\n } else {\n exitRuntime();\n }\n procExit(status);\n }\n function procExit(code) {\n EXITSTATUS = code;\n if (!keepRuntimeAlive()) {\n PThread.terminateAllThreads();\n if (Module[\"onExit\"])\n Module[\"onExit\"](code);\n ABORT = true;\n }\n quit_(code, new ExitStatus(code));\n }\n if (Module[\"preInit\"]) {\n if (typeof Module[\"preInit\"] == \"function\")\n Module[\"preInit\"] = [Module[\"preInit\"]];\n while (Module[\"preInit\"].length > 0) {\n Module[\"preInit\"].pop()();\n }\n }\n run();\n var listenersAdded;\n if (beforeListeners) {\n listenersAdded = { uncaughtException: process.listeners(\"uncaughtException\").filter(function(listener) {\n return !beforeListeners.uncaughtException.indexOf(listener) > -1;\n }), unhandledRejection: process.listeners(\"unhandledRejection\").filter(function(listener) {\n return !beforeListeners.unhandledRejection.indexOf(listener) > -1;\n }) };\n }\n var actualModule;\n if (typeof WasmBackendModule !== \"undefined\") {\n actualModule = WasmBackendModule;\n } else if (typeof WasmBackendModuleThreadedSimd3 !== \"undefined\") {\n actualModule = WasmBackendModuleThreadedSimd3;\n } else {\n throw new Error(\"Could not find wasm module in post.js\");\n }\n if (listenersAdded) {\n var tmpDispose = actualModule[\"_dispose\"];\n actualModule[\"_dispose\"] = function() {\n tmpDispose();\n listenersAdded.uncaughtException.forEach(function(listener) {\n process.removeListener(\"uncaughtException\", listener);\n });\n listenersAdded.unhandledRejection.forEach(function(listener) {\n process.removeListener(\"unhandledRejection\", listener);\n });\n };\n }\n return WasmBackendModuleThreadedSimd3.ready;\n };\n })();\n if (typeof exports === \"object\" && typeof module === \"object\")\n module.exports = WasmBackendModuleThreadedSimd2;\n else if (typeof define === \"function\" && define[\"amd\"])\n define([], function() {\n return WasmBackendModuleThreadedSimd2;\n });\n else if (typeof exports === \"object\")\n exports[\"WasmBackendModuleThreadedSimd\"] = WasmBackendModuleThreadedSimd2;\n }\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.worker.js\nvar require_tfjs_backend_wasm_threaded_simd_worker = __commonJS({\n \"node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm-threaded-simd.worker.js\"(exports, module) {\n module.exports.wasmWorkerContents = `\"use strict\";var Module={};var ENVIRONMENT_IS_NODE=typeof process===\"object\"&&typeof process.versions===\"object\"&&typeof 
process.versions.node===\"string\";if(ENVIRONMENT_IS_NODE){var nodeWorkerThreads=require(\"worker_threads\");var parentPort=nodeWorkerThreads.parentPort;parentPort.on(\"message\",function(data){onmessage({data:data})});var fs=require(\"fs\");Object.assign(global,{self:global,require:require,Module:Module,location:{href:__filename},Worker:nodeWorkerThreads.Worker,importScripts:function(f){(0,eval)(fs.readFileSync(f,\"utf8\"))},postMessage:function(msg){parentPort.postMessage(msg)},performance:global.performance||{now:function(){return Date.now()}}})}function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(\" \");if(ENVIRONMENT_IS_NODE){fs.writeSync(2,text+\"\n\");return}console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(\" \");postMessage({cmd:\"alert\",text:text,threadId:Module[\"_pthread_self\"]()})}var err=threadPrintErr;self.alert=threadAlert;Module[\"instantiateWasm\"]=((info,receiveInstance)=>{var instance=new WebAssembly.Instance(Module[\"wasmModule\"],info);receiveInstance(instance);Module[\"wasmModule\"]=null;return instance.exports});self.onmessage=(e=>{try{if(e.data.cmd===\"load\"){Module[\"wasmModule\"]=e.data.wasmModule;Module[\"wasmMemory\"]=e.data.wasmMemory;Module[\"buffer\"]=Module[\"wasmMemory\"].buffer;Module[\"ENVIRONMENT_IS_PTHREAD\"]=true;if(typeof e.data.urlOrBlob===\"string\"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}WasmBackendModuleThreadedSimd(Module).then(function(instance){Module=instance})}else if(e.data.cmd===\"run\"){Module[\"__performance_now_clock_drift\"]=performance.now()-e.data.time;Module[\"__emscripten_thread_init\"](e.data.threadInfoStruct,0,0,1);Module[\"establishStackSpace\"]();Module[\"PThread\"].receiveObjectTransfer(e.data);Module[\"PThread\"].threadInit();try{var result=Module[\"invokeEntryPoint\"](e.data.start_routine,e.data.arg);if(Module[\"keepRuntimeAlive\"]()){Module[\"PThread\"].setExitStatus(result)}else{Module[\"__emscripten_thread_exit\"](result)}}catch(ex){if(ex!=\"unwind\"){if(ex instanceof Module[\"ExitStatus\"]){if(Module[\"keepRuntimeAlive\"]()){}else{Module[\"__emscripten_thread_exit\"](ex.status)}}else{throw ex}}}}else if(e.data.cmd===\"cancel\"){if(Module[\"_pthread_self\"]()){Module[\"__emscripten_thread_exit\"](-1)}}else if(e.data.target===\"setimmediate\"){}else if(e.data.cmd===\"processThreadQueue\"){if(Module[\"_pthread_self\"]()){Module[\"_emscripten_current_thread_process_queued_calls\"]()}}else if(e.data.cmd===\"processProxyingQueue\"){if(Module[\"_pthread_self\"]()){Module[\"_emscripten_proxy_execute_queue\"](e.data.queue)}}else{err(\"worker.js received unknown command \"+e.data.cmd);err(e.data)}}catch(ex){err(\"worker.js onmessage() captured an uncaught exception: \"+ex);if(ex&&ex.stack)err(ex.stack);if(Module[\"__emscripten_thread_crashed\"]){Module[\"__emscripten_thread_crashed\"]()}throw ex}});`;\n }\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm.js\nvar require_tfjs_backend_wasm = __commonJS({\n \"node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@3.19.0_hek32lflchivueqv5i4vgonghu/node_modules/@tensorflow/tfjs-backend-wasm/wasm-out/tfjs-backend-wasm.js\"(exports, module) {\n var WasmBackendModule2 = (() => {\n var _scriptDir = typeof document !== \"undefined\" && document.currentScript ? 
document.currentScript.src : void 0;\n if (typeof __filename !== \"undefined\")\n _scriptDir = _scriptDir || __filename;\n return function(WasmBackendModule3) {\n WasmBackendModule3 = WasmBackendModule3 || {};\n var Module = typeof WasmBackendModule3 !== \"undefined\" ? WasmBackendModule3 : {};\n var readyPromiseResolve, readyPromiseReject;\n Module[\"ready\"] = new Promise(function(resolve, reject) {\n readyPromiseResolve = resolve;\n readyPromiseReject = reject;\n });\n var beforeListeners;\n if (typeof process !== \"undefined\" && process.listeners) {\n beforeListeners = { uncaughtException: process.listeners(\"uncaughtException\"), unhandledRejection: process.listeners(\"unhandledRejection\") };\n }\n var moduleOverrides = Object.assign({}, Module);\n var arguments_ = [];\n var thisProgram = \"./this.program\";\n var quit_ = (status, toThrow) => {\n throw toThrow;\n };\n var ENVIRONMENT_IS_WEB = typeof window === \"object\";\n var ENVIRONMENT_IS_WORKER = typeof importScripts === \"function\";\n var ENVIRONMENT_IS_NODE = typeof process === \"object\" && typeof process.versions === \"object\" && typeof process.versions.node === \"string\";\n var scriptDirectory = \"\";\n function locateFile(path) {\n if (Module[\"locateFile\"]) {\n return Module[\"locateFile\"](path, scriptDirectory);\n }\n return scriptDirectory + path;\n }\n var read_, readAsync, readBinary, setWindowTitle;\n function logExceptionOnExit(e2) {\n if (e2 instanceof ExitStatus)\n return;\n let toLog = e2;\n err(\"exiting due to exception: \" + toLog);\n }\n var fs;\n var nodePath;\n var requireNodeFS;\n if (ENVIRONMENT_IS_NODE) {\n if (ENVIRONMENT_IS_WORKER) {\n scriptDirectory = require_path().dirname(scriptDirectory) + \"/\";\n } else {\n scriptDirectory = __dirname + \"/\";\n }\n requireNodeFS = () => {\n if (!nodePath) {\n fs = require_fs();\n nodePath = require_path();\n }\n };\n read_ = function shell_read(filename, binary) {\n requireNodeFS();\n filename = nodePath[\"normalize\"](filename);\n return fs.readFileSync(filename, binary ? 
void 0 : \"utf8\");\n };\n readBinary = (filename) => {\n var ret = read_(filename, true);\n if (!ret.buffer) {\n ret = new Uint8Array(ret);\n }\n return ret;\n };\n readAsync = (filename, onload, onerror) => {\n requireNodeFS();\n filename = nodePath[\"normalize\"](filename);\n fs.readFile(filename, function(err2, data) {\n if (err2)\n onerror(err2);\n else\n onload(data.buffer);\n });\n };\n if (process[\"argv\"].length > 1) {\n thisProgram = process[\"argv\"][1].replace(/\\\\/g, \"/\");\n }\n arguments_ = process[\"argv\"].slice(2);\n process[\"on\"](\"uncaughtException\", function(ex) {\n if (!(ex instanceof ExitStatus)) {\n throw ex;\n }\n });\n process[\"on\"](\"unhandledRejection\", function(reason) {\n throw reason;\n });\n quit_ = (status, toThrow) => {\n if (keepRuntimeAlive()) {\n process[\"exitCode\"] = status;\n throw toThrow;\n }\n logExceptionOnExit(toThrow);\n process[\"exit\"](status);\n };\n Module[\"inspect\"] = function() {\n return \"[Emscripten Module object]\";\n };\n } else if (ENVIRONMENT_IS_WEB || ENVIRONMENT_IS_WORKER) {\n if (ENVIRONMENT_IS_WORKER) {\n scriptDirectory = self.location.href;\n } else if (typeof document !== \"undefined\" && document.currentScript) {\n scriptDirectory = document.currentScript.src;\n }\n if (_scriptDir) {\n scriptDirectory = _scriptDir;\n }\n if (scriptDirectory.indexOf(\"blob:\") !== 0) {\n scriptDirectory = scriptDirectory.substr(0, scriptDirectory.replace(/[?#].*/, \"\").lastIndexOf(\"/\") + 1);\n } else {\n scriptDirectory = \"\";\n }\n {\n read_ = (url) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, false);\n xhr.send(null);\n return xhr.responseText;\n };\n if (ENVIRONMENT_IS_WORKER) {\n readBinary = (url) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, false);\n xhr.responseType = \"arraybuffer\";\n xhr.send(null);\n return new Uint8Array(xhr.response);\n };\n }\n readAsync = (url, onload, onerror) => {\n var xhr = new XMLHttpRequest();\n xhr.open(\"GET\", url, true);\n xhr.responseType = \"arraybuffer\";\n xhr.onload = () => {\n if (xhr.status == 200 || xhr.status == 0 && xhr.response) {\n onload(xhr.response);\n return;\n }\n onerror();\n };\n xhr.onerror = onerror;\n xhr.send(null);\n };\n }\n setWindowTitle = (title) => document.title = title;\n } else {\n }\n var out = Module[\"print\"] || console.log.bind(console);\n var err = Module[\"printErr\"] || console.warn.bind(console);\n Object.assign(Module, moduleOverrides);\n moduleOverrides = null;\n if (Module[\"arguments\"])\n arguments_ = Module[\"arguments\"];\n if (Module[\"thisProgram\"])\n thisProgram = Module[\"thisProgram\"];\n if (Module[\"quit\"])\n quit_ = Module[\"quit\"];\n var POINTER_SIZE = 4;\n function warnOnce(text) {\n if (!warnOnce.shown)\n warnOnce.shown = {};\n if (!warnOnce.shown[text]) {\n warnOnce.shown[text] = 1;\n err(text);\n }\n }\n function convertJsFunctionToWasm(func2, sig) {\n if (typeof WebAssembly.Function === \"function\") {\n var typeNames = { \"i\": \"i32\", \"j\": \"i64\", \"f\": \"f32\", \"d\": \"f64\" };\n var type = { parameters: [], results: sig[0] == \"v\" ? 
[] : [typeNames[sig[0]]] };\n for (var i2 = 1; i2 < sig.length; ++i2) {\n type.parameters.push(typeNames[sig[i2]]);\n }\n return new WebAssembly.Function(type, func2);\n }\n var typeSection = [1, 0, 1, 96];\n var sigRet = sig.slice(0, 1);\n var sigParam = sig.slice(1);\n var typeCodes = { \"i\": 127, \"j\": 126, \"f\": 125, \"d\": 124 };\n typeSection.push(sigParam.length);\n for (var i2 = 0; i2 < sigParam.length; ++i2) {\n typeSection.push(typeCodes[sigParam[i2]]);\n }\n if (sigRet == \"v\") {\n typeSection.push(0);\n } else {\n typeSection = typeSection.concat([1, typeCodes[sigRet]]);\n }\n typeSection[1] = typeSection.length - 2;\n var bytes = new Uint8Array([0, 97, 115, 109, 1, 0, 0, 0].concat(typeSection, [2, 7, 1, 1, 101, 1, 102, 0, 0, 7, 5, 1, 1, 102, 0, 0]));\n var module2 = new WebAssembly.Module(bytes);\n var instance = new WebAssembly.Instance(module2, { \"e\": { \"f\": func2 } });\n var wrappedFunc = instance.exports[\"f\"];\n return wrappedFunc;\n }\n var freeTableIndexes = [];\n var functionsInTableMap;\n function getEmptyTableSlot() {\n if (freeTableIndexes.length) {\n return freeTableIndexes.pop();\n }\n try {\n wasmTable.grow(1);\n } catch (err2) {\n if (!(err2 instanceof RangeError)) {\n throw err2;\n }\n throw \"Unable to grow wasm table. Set ALLOW_TABLE_GROWTH.\";\n }\n return wasmTable.length - 1;\n }\n function updateTableMap(offset, count2) {\n for (var i2 = offset; i2 < offset + count2; i2++) {\n var item = getWasmTableEntry(i2);\n if (item) {\n functionsInTableMap.set(item, i2);\n }\n }\n }\n var tempRet0 = 0;\n var setTempRet0 = (value) => {\n tempRet0 = value;\n };\n var wasmBinary;\n if (Module[\"wasmBinary\"])\n wasmBinary = Module[\"wasmBinary\"];\n var noExitRuntime = Module[\"noExitRuntime\"] || true;\n if (typeof WebAssembly !== \"object\") {\n abort(\"no native wasm support detected\");\n }\n var wasmMemory;\n var ABORT = false;\n var EXITSTATUS;\n function assert3(condition, text) {\n if (!condition) {\n abort(text);\n }\n }\n function getCFunc(ident) {\n var func2 = Module[\"_\" + ident];\n return func2;\n }\n function ccall(ident, returnType, argTypes, args, opts) {\n var toC = { \"string\": function(str) {\n var ret2 = 0;\n if (str !== null && str !== void 0 && str !== 0) {\n var len = (str.length << 2) + 1;\n ret2 = stackAlloc(len);\n stringToUTF8(str, ret2, len);\n }\n return ret2;\n }, \"array\": function(arr) {\n var ret2 = stackAlloc(arr.length);\n writeArrayToMemory(arr, ret2);\n return ret2;\n } };\n function convertReturnValue(ret2) {\n if (returnType === \"string\")\n return UTF8ToString(ret2);\n if (returnType === \"boolean\")\n return Boolean(ret2);\n return ret2;\n }\n var func2 = getCFunc(ident);\n var cArgs = [];\n var stack2 = 0;\n if (args) {\n for (var i2 = 0; i2 < args.length; i2++) {\n var converter = toC[argTypes[i2]];\n if (converter) {\n if (stack2 === 0)\n stack2 = stackSave();\n cArgs[i2] = converter(args[i2]);\n } else {\n cArgs[i2] = args[i2];\n }\n }\n }\n var ret = func2.apply(null, cArgs);\n function onDone(ret2) {\n if (stack2 !== 0)\n stackRestore(stack2);\n return convertReturnValue(ret2);\n }\n ret = onDone(ret);\n return ret;\n }\n function cwrap(ident, returnType, argTypes, opts) {\n argTypes = argTypes || [];\n var numericArgs = argTypes.every(function(type) {\n return type === \"number\";\n });\n var numericRet = returnType !== \"string\";\n if (numericRet && numericArgs && !opts) {\n return getCFunc(ident);\n }\n return function() {\n return ccall(ident, returnType, argTypes, arguments, opts);\n };\n }\n var 
ALLOC_STACK = 1;\n var UTF8Decoder = typeof TextDecoder !== \"undefined\" ? new TextDecoder(\"utf8\") : void 0;\n function UTF8ArrayToString(heap, idx, maxBytesToRead) {\n var endIdx = idx + maxBytesToRead;\n var endPtr = idx;\n while (heap[endPtr] && !(endPtr >= endIdx))\n ++endPtr;\n if (endPtr - idx > 16 && heap.subarray && UTF8Decoder) {\n return UTF8Decoder.decode(heap.subarray(idx, endPtr));\n } else {\n var str = \"\";\n while (idx < endPtr) {\n var u0 = heap[idx++];\n if (!(u0 & 128)) {\n str += String.fromCharCode(u0);\n continue;\n }\n var u1 = heap[idx++] & 63;\n if ((u0 & 224) == 192) {\n str += String.fromCharCode((u0 & 31) << 6 | u1);\n continue;\n }\n var u2 = heap[idx++] & 63;\n if ((u0 & 240) == 224) {\n u0 = (u0 & 15) << 12 | u1 << 6 | u2;\n } else {\n u0 = (u0 & 7) << 18 | u1 << 12 | u2 << 6 | heap[idx++] & 63;\n }\n if (u0 < 65536) {\n str += String.fromCharCode(u0);\n } else {\n var ch = u0 - 65536;\n str += String.fromCharCode(55296 | ch >> 10, 56320 | ch & 1023);\n }\n }\n }\n return str;\n }\n function UTF8ToString(ptr, maxBytesToRead) {\n return ptr ? UTF8ArrayToString(HEAPU8, ptr, maxBytesToRead) : \"\";\n }\n function stringToUTF8Array(str, heap, outIdx, maxBytesToWrite) {\n if (!(maxBytesToWrite > 0))\n return 0;\n var startIdx = outIdx;\n var endIdx = outIdx + maxBytesToWrite - 1;\n for (var i2 = 0; i2 < str.length; ++i2) {\n var u = str.charCodeAt(i2);\n if (u >= 55296 && u <= 57343) {\n var u1 = str.charCodeAt(++i2);\n u = 65536 + ((u & 1023) << 10) | u1 & 1023;\n }\n if (u <= 127) {\n if (outIdx >= endIdx)\n break;\n heap[outIdx++] = u;\n } else if (u <= 2047) {\n if (outIdx + 1 >= endIdx)\n break;\n heap[outIdx++] = 192 | u >> 6;\n heap[outIdx++] = 128 | u & 63;\n } else if (u <= 65535) {\n if (outIdx + 2 >= endIdx)\n break;\n heap[outIdx++] = 224 | u >> 12;\n heap[outIdx++] = 128 | u >> 6 & 63;\n heap[outIdx++] = 128 | u & 63;\n } else {\n if (outIdx + 3 >= endIdx)\n break;\n heap[outIdx++] = 240 | u >> 18;\n heap[outIdx++] = 128 | u >> 12 & 63;\n heap[outIdx++] = 128 | u >> 6 & 63;\n heap[outIdx++] = 128 | u & 63;\n }\n }\n heap[outIdx] = 0;\n return outIdx - startIdx;\n }\n function stringToUTF8(str, outPtr, maxBytesToWrite) {\n return stringToUTF8Array(str, HEAPU8, outPtr, maxBytesToWrite);\n }\n function lengthBytesUTF8(str) {\n var len = 0;\n for (var i2 = 0; i2 < str.length; ++i2) {\n var u = str.charCodeAt(i2);\n if (u >= 55296 && u <= 57343)\n u = 65536 + ((u & 1023) << 10) | str.charCodeAt(++i2) & 1023;\n if (u <= 127)\n ++len;\n else if (u <= 2047)\n len += 2;\n else if (u <= 65535)\n len += 3;\n else\n len += 4;\n }\n return len;\n }\n var UTF16Decoder = typeof TextDecoder !== \"undefined\" ? 
new TextDecoder(\"utf-16le\") : void 0;\n function writeArrayToMemory(array2, buffer3) {\n HEAP8.set(array2, buffer3);\n }\n function writeAsciiToMemory(str, buffer3, dontAddNull) {\n for (var i2 = 0; i2 < str.length; ++i2) {\n HEAP8[buffer3++ >> 0] = str.charCodeAt(i2);\n }\n if (!dontAddNull)\n HEAP8[buffer3 >> 0] = 0;\n }\n function alignUp(x, multiple) {\n if (x % multiple > 0) {\n x += multiple - x % multiple;\n }\n return x;\n }\n var buffer2, HEAP8, HEAPU8, HEAP16, HEAPU16, HEAP32, HEAPU32, HEAPF32, HEAPF64;\n function updateGlobalBufferAndViews(buf) {\n buffer2 = buf;\n Module[\"HEAP8\"] = HEAP8 = new Int8Array(buf);\n Module[\"HEAP16\"] = HEAP16 = new Int16Array(buf);\n Module[\"HEAP32\"] = HEAP32 = new Int32Array(buf);\n Module[\"HEAPU8\"] = HEAPU8 = new Uint8Array(buf);\n Module[\"HEAPU16\"] = HEAPU16 = new Uint16Array(buf);\n Module[\"HEAPU32\"] = HEAPU32 = new Uint32Array(buf);\n Module[\"HEAPF32\"] = HEAPF32 = new Float32Array(buf);\n Module[\"HEAPF64\"] = HEAPF64 = new Float64Array(buf);\n }\n var INITIAL_MEMORY = Module[\"INITIAL_MEMORY\"] || 16777216;\n var wasmTable;\n var __ATPRERUN__ = [];\n var __ATINIT__ = [];\n var __ATPOSTRUN__ = [];\n var runtimeInitialized = false;\n var runtimeExited = false;\n var runtimeKeepaliveCounter = 0;\n function keepRuntimeAlive() {\n return noExitRuntime || runtimeKeepaliveCounter > 0;\n }\n function preRun() {\n if (Module[\"preRun\"]) {\n if (typeof Module[\"preRun\"] == \"function\")\n Module[\"preRun\"] = [Module[\"preRun\"]];\n while (Module[\"preRun\"].length) {\n addOnPreRun(Module[\"preRun\"].shift());\n }\n }\n callRuntimeCallbacks(__ATPRERUN__);\n }\n function initRuntime() {\n runtimeInitialized = true;\n callRuntimeCallbacks(__ATINIT__);\n }\n function exitRuntime() {\n runtimeExited = true;\n }\n function postRun() {\n if (Module[\"postRun\"]) {\n if (typeof Module[\"postRun\"] == \"function\")\n Module[\"postRun\"] = [Module[\"postRun\"]];\n while (Module[\"postRun\"].length) {\n addOnPostRun(Module[\"postRun\"].shift());\n }\n }\n callRuntimeCallbacks(__ATPOSTRUN__);\n }\n function addOnPreRun(cb) {\n __ATPRERUN__.unshift(cb);\n }\n function addOnInit(cb) {\n __ATINIT__.unshift(cb);\n }\n function addOnPostRun(cb) {\n __ATPOSTRUN__.unshift(cb);\n }\n var runDependencies = 0;\n var runDependencyWatcher = null;\n var dependenciesFulfilled = null;\n function addRunDependency(id) {\n runDependencies++;\n if (Module[\"monitorRunDependencies\"]) {\n Module[\"monitorRunDependencies\"](runDependencies);\n }\n }\n function removeRunDependency(id) {\n runDependencies--;\n if (Module[\"monitorRunDependencies\"]) {\n Module[\"monitorRunDependencies\"](runDependencies);\n }\n if (runDependencies == 0) {\n if (runDependencyWatcher !== null) {\n clearInterval(runDependencyWatcher);\n runDependencyWatcher = null;\n }\n if (dependenciesFulfilled) {\n var callback = dependenciesFulfilled;\n dependenciesFulfilled = null;\n callback();\n }\n }\n }\n Module[\"preloadedImages\"] = {};\n Module[\"preloadedAudios\"] = {};\n function abort(what) {\n {\n if (Module[\"onAbort\"]) {\n Module[\"onAbort\"](what);\n }\n }\n what = \"Aborted(\" + what + \")\";\n err(what);\n ABORT = true;\n EXITSTATUS = 1;\n what += \". 
Build with -s ASSERTIONS=1 for more info.\";\n var e2 = new WebAssembly.RuntimeError(what);\n readyPromiseReject(e2);\n throw e2;\n }\n var dataURIPrefix = \"data:application/octet-stream;base64,\";\n function isDataURI(filename) {\n return filename.startsWith(dataURIPrefix);\n }\n function isFileURI(filename) {\n return filename.startsWith(\"file://\");\n }\n var wasmBinaryFile;\n wasmBinaryFile = \"tfjs-backend-wasm.wasm\";\n if (!isDataURI(wasmBinaryFile)) {\n wasmBinaryFile = locateFile(wasmBinaryFile);\n }\n function getBinary(file) {\n try {\n if (file == wasmBinaryFile && wasmBinary) {\n return new Uint8Array(wasmBinary);\n }\n if (readBinary) {\n return readBinary(file);\n } else {\n throw \"both async and sync fetching of the wasm failed\";\n }\n } catch (err2) {\n abort(err2);\n }\n }\n function getBinaryPromise() {\n if (!wasmBinary && (ENVIRONMENT_IS_WEB || ENVIRONMENT_IS_WORKER)) {\n if (typeof fetch === \"function\" && !isFileURI(wasmBinaryFile)) {\n return fetch(wasmBinaryFile, { credentials: \"same-origin\" }).then(function(response) {\n if (!response[\"ok\"]) {\n throw \"failed to load wasm binary file at '\" + wasmBinaryFile + \"'\";\n }\n return response[\"arrayBuffer\"]();\n }).catch(function() {\n return getBinary(wasmBinaryFile);\n });\n } else {\n if (readAsync) {\n return new Promise(function(resolve, reject) {\n readAsync(wasmBinaryFile, function(response) {\n resolve(new Uint8Array(response));\n }, reject);\n });\n }\n }\n }\n return Promise.resolve().then(function() {\n return getBinary(wasmBinaryFile);\n });\n }\n function createWasm() {\n var info = { \"env\": asmLibraryArg, \"wasi_snapshot_preview1\": asmLibraryArg };\n function receiveInstance(instance, module2) {\n var exports3 = instance.exports;\n Module[\"asm\"] = exports3;\n wasmMemory = Module[\"asm\"][\"memory\"];\n updateGlobalBufferAndViews(wasmMemory.buffer);\n wasmTable = Module[\"asm\"][\"__indirect_function_table\"];\n addOnInit(Module[\"asm\"][\"__wasm_call_ctors\"]);\n removeRunDependency(\"wasm-instantiate\");\n }\n addRunDependency(\"wasm-instantiate\");\n function receiveInstantiationResult(result) {\n receiveInstance(result[\"instance\"]);\n }\n function instantiateArrayBuffer(receiver) {\n return getBinaryPromise().then(function(binary) {\n return WebAssembly.instantiate(binary, info);\n }).then(function(instance) {\n return instance;\n }).then(receiver, function(reason) {\n err(\"failed to asynchronously prepare wasm: \" + reason);\n abort(reason);\n });\n }\n function instantiateAsync() {\n if (!wasmBinary && typeof WebAssembly.instantiateStreaming === \"function\" && !isDataURI(wasmBinaryFile) && !isFileURI(wasmBinaryFile) && typeof fetch === \"function\") {\n return fetch(wasmBinaryFile, { credentials: \"same-origin\" }).then(function(response) {\n var result = WebAssembly.instantiateStreaming(response, info);\n return result.then(receiveInstantiationResult, function(reason) {\n err(\"wasm streaming compile failed: \" + reason);\n err(\"falling back to ArrayBuffer instantiation\");\n return instantiateArrayBuffer(receiveInstantiationResult);\n });\n });\n } else {\n return instantiateArrayBuffer(receiveInstantiationResult);\n }\n }\n if (Module[\"instantiateWasm\"]) {\n try {\n var exports2 = Module[\"instantiateWasm\"](info, receiveInstance);\n return exports2;\n } catch (e2) {\n err(\"Module.instantiateWasm callback failed with error: \" + e2);\n return false;\n }\n }\n instantiateAsync().catch(readyPromiseReject);\n return {};\n }\n var tempDouble;\n var tempI64;\n function 
callRuntimeCallbacks(callbacks2) {\n while (callbacks2.length > 0) {\n var callback = callbacks2.shift();\n if (typeof callback == \"function\") {\n callback(Module);\n continue;\n }\n var func2 = callback.func;\n if (typeof func2 === \"number\") {\n if (callback.arg === void 0) {\n getWasmTableEntry(func2)();\n } else {\n getWasmTableEntry(func2)(callback.arg);\n }\n } else {\n func2(callback.arg === void 0 ? null : callback.arg);\n }\n }\n }\n function demangle(func2) {\n return func2;\n }\n function demangleAll(text) {\n var regex = /\\b_Z[\\w\\d_]+/g;\n return text.replace(regex, function(x) {\n var y = demangle(x);\n return x === y ? x : y + \" [\" + x + \"]\";\n });\n }\n var wasmTableMirror = [];\n function getWasmTableEntry(funcPtr) {\n var func2 = wasmTableMirror[funcPtr];\n if (!func2) {\n if (funcPtr >= wasmTableMirror.length)\n wasmTableMirror.length = funcPtr + 1;\n wasmTableMirror[funcPtr] = func2 = wasmTable.get(funcPtr);\n }\n return func2;\n }\n function jsStackTrace() {\n var error = new Error();\n if (!error.stack) {\n try {\n throw new Error();\n } catch (e2) {\n error = e2;\n }\n if (!error.stack) {\n return \"(no stack trace available)\";\n }\n }\n return error.stack.toString();\n }\n function setWasmTableEntry(idx, func2) {\n wasmTable.set(idx, func2);\n wasmTableMirror[idx] = func2;\n }\n function _abort() {\n abort(\"\");\n }\n function _emscripten_get_heap_max() {\n return 2147483648;\n }\n function _emscripten_memcpy_big(dest, src, num) {\n HEAPU8.copyWithin(dest, src, src + num);\n }\n function emscripten_realloc_buffer(size) {\n try {\n wasmMemory.grow(size - buffer2.byteLength + 65535 >>> 16);\n updateGlobalBufferAndViews(wasmMemory.buffer);\n return 1;\n } catch (e2) {\n }\n }\n function _emscripten_resize_heap(requestedSize) {\n var oldSize = HEAPU8.length;\n requestedSize = requestedSize >>> 0;\n var maxHeapSize = _emscripten_get_heap_max();\n if (requestedSize > maxHeapSize) {\n return false;\n }\n for (var cutDown = 1; cutDown <= 4; cutDown *= 2) {\n var overGrownHeapSize = oldSize * (1 + 0.2 / cutDown);\n overGrownHeapSize = Math.min(overGrownHeapSize, requestedSize + 100663296);\n var newSize = Math.min(maxHeapSize, alignUp(Math.max(requestedSize, overGrownHeapSize), 65536));\n var replacement = emscripten_realloc_buffer(newSize);\n if (replacement) {\n return true;\n }\n }\n return false;\n }\n var SYSCALLS = { mappings: {}, buffers: [null, [], []], printChar: function(stream, curr) {\n var buffer3 = SYSCALLS.buffers[stream];\n if (curr === 0 || curr === 10) {\n (stream === 1 ? 
out : err)(UTF8ArrayToString(buffer3, 0));\n buffer3.length = 0;\n } else {\n buffer3.push(curr);\n }\n }, varargs: void 0, get: function() {\n SYSCALLS.varargs += 4;\n var ret = HEAP32[SYSCALLS.varargs - 4 >> 2];\n return ret;\n }, getStr: function(ptr) {\n var ret = UTF8ToString(ptr);\n return ret;\n }, get64: function(low, high) {\n return low;\n } };\n function _fd_close(fd) {\n return 0;\n }\n function _fd_seek(fd, offset_low, offset_high, whence, newOffset) {\n }\n function _fd_write(fd, iov, iovcnt, pnum) {\n var num = 0;\n for (var i2 = 0; i2 < iovcnt; i2++) {\n var ptr = HEAP32[iov >> 2];\n var len = HEAP32[iov + 4 >> 2];\n iov += 8;\n for (var j = 0; j < len; j++) {\n SYSCALLS.printChar(fd, HEAPU8[ptr + j]);\n }\n num += len;\n }\n HEAP32[pnum >> 2] = num;\n return 0;\n }\n function _setTempRet0(val) {\n setTempRet0(val);\n }\n var ASSERTIONS = false;\n var asmLibraryArg = { \"abort\": _abort, \"emscripten_get_heap_max\": _emscripten_get_heap_max, \"emscripten_memcpy_big\": _emscripten_memcpy_big, \"emscripten_resize_heap\": _emscripten_resize_heap, \"fd_close\": _fd_close, \"fd_seek\": _fd_seek, \"fd_write\": _fd_write, \"setTempRet0\": _setTempRet0 };\n var asm = createWasm();\n var ___wasm_call_ctors = Module[\"___wasm_call_ctors\"] = function() {\n return (___wasm_call_ctors = Module[\"___wasm_call_ctors\"] = Module[\"asm\"][\"__wasm_call_ctors\"]).apply(null, arguments);\n };\n var _init = Module[\"_init\"] = function() {\n return (_init = Module[\"_init\"] = Module[\"asm\"][\"init\"]).apply(null, arguments);\n };\n var _init_with_threads_count = Module[\"_init_with_threads_count\"] = function() {\n return (_init_with_threads_count = Module[\"_init_with_threads_count\"] = Module[\"asm\"][\"init_with_threads_count\"]).apply(null, arguments);\n };\n var _get_threads_count = Module[\"_get_threads_count\"] = function() {\n return (_get_threads_count = Module[\"_get_threads_count\"] = Module[\"asm\"][\"get_threads_count\"]).apply(null, arguments);\n };\n var _register_tensor = Module[\"_register_tensor\"] = function() {\n return (_register_tensor = Module[\"_register_tensor\"] = Module[\"asm\"][\"register_tensor\"]).apply(null, arguments);\n };\n var _dispose_data = Module[\"_dispose_data\"] = function() {\n return (_dispose_data = Module[\"_dispose_data\"] = Module[\"asm\"][\"dispose_data\"]).apply(null, arguments);\n };\n var _dispose = Module[\"_dispose\"] = function() {\n return (_dispose = Module[\"_dispose\"] = Module[\"asm\"][\"dispose\"]).apply(null, arguments);\n };\n var _Abs = Module[\"_Abs\"] = function() {\n return (_Abs = Module[\"_Abs\"] = Module[\"asm\"][\"Abs\"]).apply(null, arguments);\n };\n var _Add = Module[\"_Add\"] = function() {\n return (_Add = Module[\"_Add\"] = Module[\"asm\"][\"Add\"]).apply(null, arguments);\n };\n var _AddN = Module[\"_AddN\"] = function() {\n return (_AddN = Module[\"_AddN\"] = Module[\"asm\"][\"AddN\"]).apply(null, arguments);\n };\n var _All = Module[\"_All\"] = function() {\n return (_All = Module[\"_All\"] = Module[\"asm\"][\"All\"]).apply(null, arguments);\n };\n var _Any = Module[\"_Any\"] = function() {\n return (_Any = Module[\"_Any\"] = Module[\"asm\"][\"Any\"]).apply(null, arguments);\n };\n var _ArgMax = Module[\"_ArgMax\"] = function() {\n return (_ArgMax = Module[\"_ArgMax\"] = Module[\"asm\"][\"ArgMax\"]).apply(null, arguments);\n };\n var _AvgPool = Module[\"_AvgPool\"] = function() {\n return (_AvgPool = Module[\"_AvgPool\"] = Module[\"asm\"][\"AvgPool\"]).apply(null, arguments);\n };\n var _BatchMatMul = 
Module[\"_BatchMatMul\"] = function() {\n return (_BatchMatMul = Module[\"_BatchMatMul\"] = Module[\"asm\"][\"BatchMatMul\"]).apply(null, arguments);\n };\n var _Ceil = Module[\"_Ceil\"] = function() {\n return (_Ceil = Module[\"_Ceil\"] = Module[\"asm\"][\"Ceil\"]).apply(null, arguments);\n };\n var _ClipByValue = Module[\"_ClipByValue\"] = function() {\n return (_ClipByValue = Module[\"_ClipByValue\"] = Module[\"asm\"][\"ClipByValue\"]).apply(null, arguments);\n };\n var _Conv2D = Module[\"_Conv2D\"] = function() {\n return (_Conv2D = Module[\"_Conv2D\"] = Module[\"asm\"][\"Conv2D\"]).apply(null, arguments);\n };\n var _Conv2DBackpropInput = Module[\"_Conv2DBackpropInput\"] = function() {\n return (_Conv2DBackpropInput = Module[\"_Conv2DBackpropInput\"] = Module[\"asm\"][\"Conv2DBackpropInput\"]).apply(null, arguments);\n };\n var _Cos = Module[\"_Cos\"] = function() {\n return (_Cos = Module[\"_Cos\"] = Module[\"asm\"][\"Cos\"]).apply(null, arguments);\n };\n var _Cosh = Module[\"_Cosh\"] = function() {\n return (_Cosh = Module[\"_Cosh\"] = Module[\"asm\"][\"Cosh\"]).apply(null, arguments);\n };\n var _CropAndResize = Module[\"_CropAndResize\"] = function() {\n return (_CropAndResize = Module[\"_CropAndResize\"] = Module[\"asm\"][\"CropAndResize\"]).apply(null, arguments);\n };\n var _Cumprod = Module[\"_Cumprod\"] = function() {\n return (_Cumprod = Module[\"_Cumprod\"] = Module[\"asm\"][\"Cumprod\"]).apply(null, arguments);\n };\n var _Cumsum = Module[\"_Cumsum\"] = function() {\n return (_Cumsum = Module[\"_Cumsum\"] = Module[\"asm\"][\"Cumsum\"]).apply(null, arguments);\n };\n var _DepthToSpace = Module[\"_DepthToSpace\"] = function() {\n return (_DepthToSpace = Module[\"_DepthToSpace\"] = Module[\"asm\"][\"DepthToSpace\"]).apply(null, arguments);\n };\n var _DepthwiseConv2dNative = Module[\"_DepthwiseConv2dNative\"] = function() {\n return (_DepthwiseConv2dNative = Module[\"_DepthwiseConv2dNative\"] = Module[\"asm\"][\"DepthwiseConv2dNative\"]).apply(null, arguments);\n };\n var _Elu = Module[\"_Elu\"] = function() {\n return (_Elu = Module[\"_Elu\"] = Module[\"asm\"][\"Elu\"]).apply(null, arguments);\n };\n var _Equal = Module[\"_Equal\"] = function() {\n return (_Equal = Module[\"_Equal\"] = Module[\"asm\"][\"Equal\"]).apply(null, arguments);\n };\n var _Exp = Module[\"_Exp\"] = function() {\n return (_Exp = Module[\"_Exp\"] = Module[\"asm\"][\"Exp\"]).apply(null, arguments);\n };\n var _FlipLeftRight = Module[\"_FlipLeftRight\"] = function() {\n return (_FlipLeftRight = Module[\"_FlipLeftRight\"] = Module[\"asm\"][\"FlipLeftRight\"]).apply(null, arguments);\n };\n var _Floor = Module[\"_Floor\"] = function() {\n return (_Floor = Module[\"_Floor\"] = Module[\"asm\"][\"Floor\"]).apply(null, arguments);\n };\n var _FloorDiv = Module[\"_FloorDiv\"] = function() {\n return (_FloorDiv = Module[\"_FloorDiv\"] = Module[\"asm\"][\"FloorDiv\"]).apply(null, arguments);\n };\n var _FusedBatchNorm = Module[\"_FusedBatchNorm\"] = function() {\n return (_FusedBatchNorm = Module[\"_FusedBatchNorm\"] = Module[\"asm\"][\"FusedBatchNorm\"]).apply(null, arguments);\n };\n var _FusedConv2D = Module[\"_FusedConv2D\"] = function() {\n return (_FusedConv2D = Module[\"_FusedConv2D\"] = Module[\"asm\"][\"FusedConv2D\"]).apply(null, arguments);\n };\n var _FusedDepthwiseConv2D = Module[\"_FusedDepthwiseConv2D\"] = function() {\n return (_FusedDepthwiseConv2D = Module[\"_FusedDepthwiseConv2D\"] = Module[\"asm\"][\"FusedDepthwiseConv2D\"]).apply(null, arguments);\n };\n var _Gather = Module[\"_Gather\"] = 
function() {\n return (_Gather = Module[\"_Gather\"] = Module[\"asm\"][\"Gather\"]).apply(null, arguments);\n };\n var _GatherNd = Module[\"_GatherNd\"] = function() {\n return (_GatherNd = Module[\"_GatherNd\"] = Module[\"asm\"][\"GatherNd\"]).apply(null, arguments);\n };\n var _Greater = Module[\"_Greater\"] = function() {\n return (_Greater = Module[\"_Greater\"] = Module[\"asm\"][\"Greater\"]).apply(null, arguments);\n };\n var _GreaterEqual = Module[\"_GreaterEqual\"] = function() {\n return (_GreaterEqual = Module[\"_GreaterEqual\"] = Module[\"asm\"][\"GreaterEqual\"]).apply(null, arguments);\n };\n var _LeakyRelu = Module[\"_LeakyRelu\"] = function() {\n return (_LeakyRelu = Module[\"_LeakyRelu\"] = Module[\"asm\"][\"LeakyRelu\"]).apply(null, arguments);\n };\n var _Less = Module[\"_Less\"] = function() {\n return (_Less = Module[\"_Less\"] = Module[\"asm\"][\"Less\"]).apply(null, arguments);\n };\n var _LessEqual = Module[\"_LessEqual\"] = function() {\n return (_LessEqual = Module[\"_LessEqual\"] = Module[\"asm\"][\"LessEqual\"]).apply(null, arguments);\n };\n var _Log = Module[\"_Log\"] = function() {\n return (_Log = Module[\"_Log\"] = Module[\"asm\"][\"Log\"]).apply(null, arguments);\n };\n var _LogicalAnd = Module[\"_LogicalAnd\"] = function() {\n return (_LogicalAnd = Module[\"_LogicalAnd\"] = Module[\"asm\"][\"LogicalAnd\"]).apply(null, arguments);\n };\n var _LogicalNot = Module[\"_LogicalNot\"] = function() {\n return (_LogicalNot = Module[\"_LogicalNot\"] = Module[\"asm\"][\"LogicalNot\"]).apply(null, arguments);\n };\n var _LogicalOr = Module[\"_LogicalOr\"] = function() {\n return (_LogicalOr = Module[\"_LogicalOr\"] = Module[\"asm\"][\"LogicalOr\"]).apply(null, arguments);\n };\n var _LogicalXor = Module[\"_LogicalXor\"] = function() {\n return (_LogicalXor = Module[\"_LogicalXor\"] = Module[\"asm\"][\"LogicalXor\"]).apply(null, arguments);\n };\n var _Max = Module[\"_Max\"] = function() {\n return (_Max = Module[\"_Max\"] = Module[\"asm\"][\"Max\"]).apply(null, arguments);\n };\n var _MaxPool = Module[\"_MaxPool\"] = function() {\n return (_MaxPool = Module[\"_MaxPool\"] = Module[\"asm\"][\"MaxPool\"]).apply(null, arguments);\n };\n var _Maximum = Module[\"_Maximum\"] = function() {\n return (_Maximum = Module[\"_Maximum\"] = Module[\"asm\"][\"Maximum\"]).apply(null, arguments);\n };\n var _Mean = Module[\"_Mean\"] = function() {\n return (_Mean = Module[\"_Mean\"] = Module[\"asm\"][\"Mean\"]).apply(null, arguments);\n };\n var _Min = Module[\"_Min\"] = function() {\n return (_Min = Module[\"_Min\"] = Module[\"asm\"][\"Min\"]).apply(null, arguments);\n };\n var _Minimum = Module[\"_Minimum\"] = function() {\n return (_Minimum = Module[\"_Minimum\"] = Module[\"asm\"][\"Minimum\"]).apply(null, arguments);\n };\n var _MirrorPad = Module[\"_MirrorPad\"] = function() {\n return (_MirrorPad = Module[\"_MirrorPad\"] = Module[\"asm\"][\"MirrorPad\"]).apply(null, arguments);\n };\n var _Multiply = Module[\"_Multiply\"] = function() {\n return (_Multiply = Module[\"_Multiply\"] = Module[\"asm\"][\"Multiply\"]).apply(null, arguments);\n };\n var _Neg = Module[\"_Neg\"] = function() {\n return (_Neg = Module[\"_Neg\"] = Module[\"asm\"][\"Neg\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV3 = Module[\"_NonMaxSuppressionV3\"] = function() {\n return (_NonMaxSuppressionV3 = Module[\"_NonMaxSuppressionV3\"] = Module[\"asm\"][\"NonMaxSuppressionV3\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV4 = Module[\"_NonMaxSuppressionV4\"] = function() {\n return 
(_NonMaxSuppressionV4 = Module[\"_NonMaxSuppressionV4\"] = Module[\"asm\"][\"NonMaxSuppressionV4\"]).apply(null, arguments);\n };\n var _NonMaxSuppressionV5 = Module[\"_NonMaxSuppressionV5\"] = function() {\n return (_NonMaxSuppressionV5 = Module[\"_NonMaxSuppressionV5\"] = Module[\"asm\"][\"NonMaxSuppressionV5\"]).apply(null, arguments);\n };\n var _NotEqual = Module[\"_NotEqual\"] = function() {\n return (_NotEqual = Module[\"_NotEqual\"] = Module[\"asm\"][\"NotEqual\"]).apply(null, arguments);\n };\n var _OneHot = Module[\"_OneHot\"] = function() {\n return (_OneHot = Module[\"_OneHot\"] = Module[\"asm\"][\"OneHot\"]).apply(null, arguments);\n };\n var _PadV2 = Module[\"_PadV2\"] = function() {\n return (_PadV2 = Module[\"_PadV2\"] = Module[\"asm\"][\"PadV2\"]).apply(null, arguments);\n };\n var _Pow = Module[\"_Pow\"] = function() {\n return (_Pow = Module[\"_Pow\"] = Module[\"asm\"][\"Pow\"]).apply(null, arguments);\n };\n var _Prelu = Module[\"_Prelu\"] = function() {\n return (_Prelu = Module[\"_Prelu\"] = Module[\"asm\"][\"Prelu\"]).apply(null, arguments);\n };\n var _Prod = Module[\"_Prod\"] = function() {\n return (_Prod = Module[\"_Prod\"] = Module[\"asm\"][\"Prod\"]).apply(null, arguments);\n };\n var _RealDiv = Module[\"_RealDiv\"] = function() {\n return (_RealDiv = Module[\"_RealDiv\"] = Module[\"asm\"][\"RealDiv\"]).apply(null, arguments);\n };\n var _Relu = Module[\"_Relu\"] = function() {\n return (_Relu = Module[\"_Relu\"] = Module[\"asm\"][\"Relu\"]).apply(null, arguments);\n };\n var _Relu6 = Module[\"_Relu6\"] = function() {\n return (_Relu6 = Module[\"_Relu6\"] = Module[\"asm\"][\"Relu6\"]).apply(null, arguments);\n };\n var _ResizeBilinear = Module[\"_ResizeBilinear\"] = function() {\n return (_ResizeBilinear = Module[\"_ResizeBilinear\"] = Module[\"asm\"][\"ResizeBilinear\"]).apply(null, arguments);\n };\n var _ResizeNearestNeighbor = Module[\"_ResizeNearestNeighbor\"] = function() {\n return (_ResizeNearestNeighbor = Module[\"_ResizeNearestNeighbor\"] = Module[\"asm\"][\"ResizeNearestNeighbor\"]).apply(null, arguments);\n };\n var _Reverse = Module[\"_Reverse\"] = function() {\n return (_Reverse = Module[\"_Reverse\"] = Module[\"asm\"][\"Reverse\"]).apply(null, arguments);\n };\n var _RotateWithOffset = Module[\"_RotateWithOffset\"] = function() {\n return (_RotateWithOffset = Module[\"_RotateWithOffset\"] = Module[\"asm\"][\"RotateWithOffset\"]).apply(null, arguments);\n };\n var _Round = Module[\"_Round\"] = function() {\n return (_Round = Module[\"_Round\"] = Module[\"asm\"][\"Round\"]).apply(null, arguments);\n };\n var _Rsqrt = Module[\"_Rsqrt\"] = function() {\n return (_Rsqrt = Module[\"_Rsqrt\"] = Module[\"asm\"][\"Rsqrt\"]).apply(null, arguments);\n };\n var _ScatterNd = Module[\"_ScatterNd\"] = function() {\n return (_ScatterNd = Module[\"_ScatterNd\"] = Module[\"asm\"][\"ScatterNd\"]).apply(null, arguments);\n };\n var _SelectV2 = Module[\"_SelectV2\"] = function() {\n return (_SelectV2 = Module[\"_SelectV2\"] = Module[\"asm\"][\"SelectV2\"]).apply(null, arguments);\n };\n var _Sigmoid = Module[\"_Sigmoid\"] = function() {\n return (_Sigmoid = Module[\"_Sigmoid\"] = Module[\"asm\"][\"Sigmoid\"]).apply(null, arguments);\n };\n var _Sin = Module[\"_Sin\"] = function() {\n return (_Sin = Module[\"_Sin\"] = Module[\"asm\"][\"Sin\"]).apply(null, arguments);\n };\n var _Softmax = Module[\"_Softmax\"] = function() {\n return (_Softmax = Module[\"_Softmax\"] = Module[\"asm\"][\"Softmax\"]).apply(null, arguments);\n };\n var _SparseFillEmptyRows = 
Module[\"_SparseFillEmptyRows\"] = function() {\n return (_SparseFillEmptyRows = Module[\"_SparseFillEmptyRows\"] = Module[\"asm\"][\"SparseFillEmptyRows\"]).apply(null, arguments);\n };\n var _SparseReshape = Module[\"_SparseReshape\"] = function() {\n return (_SparseReshape = Module[\"_SparseReshape\"] = Module[\"asm\"][\"SparseReshape\"]).apply(null, arguments);\n };\n var _SparseSegmentReduction = Module[\"_SparseSegmentReduction\"] = function() {\n return (_SparseSegmentReduction = Module[\"_SparseSegmentReduction\"] = Module[\"asm\"][\"SparseSegmentReduction\"]).apply(null, arguments);\n };\n var _Sqrt = Module[\"_Sqrt\"] = function() {\n return (_Sqrt = Module[\"_Sqrt\"] = Module[\"asm\"][\"Sqrt\"]).apply(null, arguments);\n };\n var _Square = Module[\"_Square\"] = function() {\n return (_Square = Module[\"_Square\"] = Module[\"asm\"][\"Square\"]).apply(null, arguments);\n };\n var _SquaredDifference = Module[\"_SquaredDifference\"] = function() {\n return (_SquaredDifference = Module[\"_SquaredDifference\"] = Module[\"asm\"][\"SquaredDifference\"]).apply(null, arguments);\n };\n var _Step = Module[\"_Step\"] = function() {\n return (_Step = Module[\"_Step\"] = Module[\"asm\"][\"Step\"]).apply(null, arguments);\n };\n var _StridedSlice = Module[\"_StridedSlice\"] = function() {\n return (_StridedSlice = Module[\"_StridedSlice\"] = Module[\"asm\"][\"StridedSlice\"]).apply(null, arguments);\n };\n var _Sub = Module[\"_Sub\"] = function() {\n return (_Sub = Module[\"_Sub\"] = Module[\"asm\"][\"Sub\"]).apply(null, arguments);\n };\n var _Sum = Module[\"_Sum\"] = function() {\n return (_Sum = Module[\"_Sum\"] = Module[\"asm\"][\"Sum\"]).apply(null, arguments);\n };\n var _Tan = Module[\"_Tan\"] = function() {\n return (_Tan = Module[\"_Tan\"] = Module[\"asm\"][\"Tan\"]).apply(null, arguments);\n };\n var _Tanh = Module[\"_Tanh\"] = function() {\n return (_Tanh = Module[\"_Tanh\"] = Module[\"asm\"][\"Tanh\"]).apply(null, arguments);\n };\n var _Tile = Module[\"_Tile\"] = function() {\n return (_Tile = Module[\"_Tile\"] = Module[\"asm\"][\"Tile\"]).apply(null, arguments);\n };\n var _TopK = Module[\"_TopK\"] = function() {\n return (_TopK = Module[\"_TopK\"] = Module[\"asm\"][\"TopK\"]).apply(null, arguments);\n };\n var _Transform = Module[\"_Transform\"] = function() {\n return (_Transform = Module[\"_Transform\"] = Module[\"asm\"][\"Transform\"]).apply(null, arguments);\n };\n var _Transpose = Module[\"_Transpose\"] = function() {\n return (_Transpose = Module[\"_Transpose\"] = Module[\"asm\"][\"Transpose\"]).apply(null, arguments);\n };\n var __FusedMatMul = Module[\"__FusedMatMul\"] = function() {\n return (__FusedMatMul = Module[\"__FusedMatMul\"] = Module[\"asm\"][\"_FusedMatMul\"]).apply(null, arguments);\n };\n var _malloc = Module[\"_malloc\"] = function() {\n return (_malloc = Module[\"_malloc\"] = Module[\"asm\"][\"malloc\"]).apply(null, arguments);\n };\n var _free = Module[\"_free\"] = function() {\n return (_free = Module[\"_free\"] = Module[\"asm\"][\"free\"]).apply(null, arguments);\n };\n var ___errno_location = Module[\"___errno_location\"] = function() {\n return (___errno_location = Module[\"___errno_location\"] = Module[\"asm\"][\"__errno_location\"]).apply(null, arguments);\n };\n var _emscripten_main_thread_process_queued_calls = Module[\"_emscripten_main_thread_process_queued_calls\"] = function() {\n return (_emscripten_main_thread_process_queued_calls = Module[\"_emscripten_main_thread_process_queued_calls\"] = 
Module[\"asm\"][\"emscripten_main_thread_process_queued_calls\"]).apply(null, arguments);\n };\n var stackSave = Module[\"stackSave\"] = function() {\n return (stackSave = Module[\"stackSave\"] = Module[\"asm\"][\"stackSave\"]).apply(null, arguments);\n };\n var stackRestore = Module[\"stackRestore\"] = function() {\n return (stackRestore = Module[\"stackRestore\"] = Module[\"asm\"][\"stackRestore\"]).apply(null, arguments);\n };\n var stackAlloc = Module[\"stackAlloc\"] = function() {\n return (stackAlloc = Module[\"stackAlloc\"] = Module[\"asm\"][\"stackAlloc\"]).apply(null, arguments);\n };\n var dynCall_iijjiiii = Module[\"dynCall_iijjiiii\"] = function() {\n return (dynCall_iijjiiii = Module[\"dynCall_iijjiiii\"] = Module[\"asm\"][\"dynCall_iijjiiii\"]).apply(null, arguments);\n };\n var dynCall_jiji = Module[\"dynCall_jiji\"] = function() {\n return (dynCall_jiji = Module[\"dynCall_jiji\"] = Module[\"asm\"][\"dynCall_jiji\"]).apply(null, arguments);\n };\n Module[\"cwrap\"] = cwrap;\n var calledRun;\n function ExitStatus(status) {\n this.name = \"ExitStatus\";\n this.message = \"Program terminated with exit(\" + status + \")\";\n this.status = status;\n }\n dependenciesFulfilled = function runCaller() {\n if (!calledRun)\n run();\n if (!calledRun)\n dependenciesFulfilled = runCaller;\n };\n function run(args) {\n args = args || arguments_;\n if (runDependencies > 0) {\n return;\n }\n preRun();\n if (runDependencies > 0) {\n return;\n }\n function doRun() {\n if (calledRun)\n return;\n calledRun = true;\n Module[\"calledRun\"] = true;\n if (ABORT)\n return;\n initRuntime();\n readyPromiseResolve(Module);\n if (Module[\"onRuntimeInitialized\"])\n Module[\"onRuntimeInitialized\"]();\n postRun();\n }\n if (Module[\"setStatus\"]) {\n Module[\"setStatus\"](\"Running...\");\n setTimeout(function() {\n setTimeout(function() {\n Module[\"setStatus\"](\"\");\n }, 1);\n doRun();\n }, 1);\n } else {\n doRun();\n }\n }\n Module[\"run\"] = run;\n function procExit(code) {\n EXITSTATUS = code;\n if (!keepRuntimeAlive()) {\n if (Module[\"onExit\"])\n Module[\"onExit\"](code);\n ABORT = true;\n }\n quit_(code, new ExitStatus(code));\n }\n if (Module[\"preInit\"]) {\n if (typeof Module[\"preInit\"] == \"function\")\n Module[\"preInit\"] = [Module[\"preInit\"]];\n while (Module[\"preInit\"].length > 0) {\n Module[\"preInit\"].pop()();\n }\n }\n run();\n var listenersAdded;\n if (beforeListeners) {\n listenersAdded = { uncaughtException: process.listeners(\"uncaughtException\").filter(function(listener) {\n return !beforeListeners.uncaughtException.indexOf(listener) > -1;\n }), unhandledRejection: process.listeners(\"unhandledRejection\").filter(function(listener) {\n return !beforeListeners.unhandledRejection.indexOf(listener) > -1;\n }) };\n }\n var actualModule;\n if (typeof WasmBackendModule3 !== \"undefined\") {\n actualModule = WasmBackendModule3;\n } else if (typeof WasmBackendModuleThreadedSimd !== \"undefined\") {\n actualModule = WasmBackendModuleThreadedSimd;\n } else {\n throw new Error(\"Could not find wasm module in post.js\");\n }\n if (listenersAdded) {\n var tmpDispose = actualModule[\"_dispose\"];\n actualModule[\"_dispose\"] = function() {\n tmpDispose();\n listenersAdded.uncaughtException.forEach(function(listener) {\n process.removeListener(\"uncaughtException\", listener);\n });\n listenersAdded.unhandledRejection.forEach(function(listener) {\n process.removeListener(\"unhandledRejection\", listener);\n });\n };\n }\n return WasmBackendModule3.ready;\n };\n })();\n if (typeof 
exports === \"object\" && typeof module === \"object\")\n module.exports = WasmBackendModule2;\n else if (typeof define === \"function\" && define[\"amd\"])\n define([], function() {\n return WasmBackendModule2;\n });\n else if (typeof exports === \"object\")\n exports[\"WasmBackendModule\"] = WasmBackendModule2;\n }\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/backends/backend.js\nvar EPSILON_FLOAT32 = 1e-7;\nvar EPSILON_FLOAT16 = 1e-4;\nvar DataStorage = class {\n constructor(backend2, dataMover) {\n this.backend = backend2;\n this.dataMover = dataMover;\n this.data = /* @__PURE__ */ new WeakMap();\n this.dataIdsCount = 0;\n }\n get(dataId) {\n if (!this.data.has(dataId)) {\n this.dataMover.moveData(this.backend, dataId);\n }\n return this.data.get(dataId);\n }\n set(dataId, value) {\n this.dataIdsCount++;\n this.data.set(dataId, value);\n }\n has(dataId) {\n return this.data.has(dataId);\n }\n delete(dataId) {\n this.dataIdsCount--;\n return this.data.delete(dataId);\n }\n numDataIds() {\n return this.dataIdsCount;\n }\n};\nvar KernelBackend = class {\n refCount(dataId) {\n return notYetImplemented(\"refCount\");\n }\n incRef(dataId) {\n return notYetImplemented(\"incRef\");\n }\n timerAvailable() {\n return true;\n }\n time(f) {\n return notYetImplemented(\"time\");\n }\n read(dataId) {\n return notYetImplemented(\"read\");\n }\n readSync(dataId) {\n return notYetImplemented(\"readSync\");\n }\n readToGPU(dataId, options) {\n return notYetImplemented(\"readToGPU\");\n }\n numDataIds() {\n return notYetImplemented(\"numDataIds\");\n }\n disposeData(dataId, force) {\n return notYetImplemented(\"disposeData\");\n }\n write(values, shape, dtype) {\n return notYetImplemented(\"write\");\n }\n move(dataId, values, shape, dtype, refCount) {\n return notYetImplemented(\"move\");\n }\n memory() {\n return notYetImplemented(\"memory\");\n }\n floatPrecision() {\n return notYetImplemented(\"floatPrecision\");\n }\n epsilon() {\n return this.floatPrecision() === 32 ? EPSILON_FLOAT32 : EPSILON_FLOAT16;\n }\n dispose() {\n return notYetImplemented(\"dispose\");\n }\n};\nfunction notYetImplemented(kernelName) {\n throw new Error(`'${kernelName}' not yet implemented or not found in the registry. This kernel may not be supported by the tfjs backend you have chosen`);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/util_base.js\nfunction shuffle(array2) {\n let counter = array2.length;\n let index = 0;\n while (counter > 0) {\n index = Math.random() * counter | 0;\n counter--;\n swap(array2, counter, index);\n }\n}\nfunction shuffleCombo(array2, array22) {\n if (array2.length !== array22.length) {\n throw new Error(`Array sizes must match to be shuffled together First array length was ${array2.length}Second array length was ${array22.length}`);\n }\n let counter = array2.length;\n let index = 0;\n while (counter > 0) {\n index = Math.random() * counter | 0;\n counter--;\n swap(array2, counter, index);\n swap(array22, counter, index);\n }\n}\nfunction clamp(min7, x, max7) {\n return Math.max(min7, Math.min(x, max7));\n}\nfunction nearestLargerEven(val) {\n return val % 2 === 0 ? 
val : val + 1;\n}\nfunction swap(object, left, right) {\n const temp = object[left];\n object[left] = object[right];\n object[right] = temp;\n}\nfunction sum(arr) {\n let sum7 = 0;\n for (let i2 = 0; i2 < arr.length; i2++) {\n sum7 += arr[i2];\n }\n return sum7;\n}\nfunction randUniform(a, b) {\n const r2 = Math.random();\n return b * r2 + (1 - r2) * a;\n}\nfunction distSquared(a, b) {\n let result = 0;\n for (let i2 = 0; i2 < a.length; i2++) {\n const diff = Number(a[i2]) - Number(b[i2]);\n result += diff * diff;\n }\n return result;\n}\nfunction assert(expr, msg) {\n if (!expr) {\n throw new Error(typeof msg === \"string\" ? msg : msg());\n }\n}\nfunction assertShapesMatch(shapeA, shapeB, errorMessagePrefix = \"\") {\n assert(arraysEqual(shapeA, shapeB), () => errorMessagePrefix + ` Shapes ${shapeA} and ${shapeB} must match`);\n}\nfunction assertNonNull(a) {\n assert(a != null, () => `The input to the tensor constructor must be a non-null value.`);\n}\nfunction flatten(arr, result = [], skipTypedArray = false) {\n if (result == null) {\n result = [];\n }\n if (Array.isArray(arr) || isTypedArray(arr) && !skipTypedArray) {\n for (let i2 = 0; i2 < arr.length; ++i2) {\n flatten(arr[i2], result, skipTypedArray);\n }\n } else {\n result.push(arr);\n }\n return result;\n}\nfunction sizeFromShape(shape) {\n if (shape.length === 0) {\n return 1;\n }\n let size = shape[0];\n for (let i2 = 1; i2 < shape.length; i2++) {\n size *= shape[i2];\n }\n return size;\n}\nfunction isScalarShape(shape) {\n return shape.length === 0;\n}\nfunction arraysEqual(n1, n2) {\n if (n1 === n2) {\n return true;\n }\n if (n1 == null || n2 == null) {\n return false;\n }\n if (n1.length !== n2.length) {\n return false;\n }\n for (let i2 = 0; i2 < n1.length; i2++) {\n if (n1[i2] !== n2[i2]) {\n return false;\n }\n }\n return true;\n}\nfunction isInt(a) {\n return a % 1 === 0;\n}\nfunction tanh(x) {\n if (Math.tanh != null) {\n return Math.tanh(x);\n }\n if (x === Infinity) {\n return 1;\n } else if (x === -Infinity) {\n return -1;\n } else {\n const e2x = Math.exp(2 * x);\n return (e2x - 1) / (e2x + 1);\n }\n}\nfunction sizeToSquarishShape(size) {\n const width = Math.ceil(Math.sqrt(size));\n return [width, Math.ceil(size / width)];\n}\nfunction createShuffledIndices(n2) {\n const shuffledIndices = new Uint32Array(n2);\n for (let i2 = 0; i2 < n2; ++i2) {\n shuffledIndices[i2] = i2;\n }\n shuffle(shuffledIndices);\n return shuffledIndices;\n}\nfunction rightPad(a, size) {\n if (size <= a.length) {\n return a;\n }\n return a + \" \".repeat(size - a.length);\n}\nfunction repeatedTry(checkFn, delayFn = (counter) => 0, maxCounter) {\n return new Promise((resolve, reject) => {\n let tryCount = 0;\n const tryFn = () => {\n if (checkFn()) {\n resolve();\n return;\n }\n tryCount++;\n const nextBackoff = delayFn(tryCount);\n if (maxCounter != null && tryCount >= maxCounter) {\n reject();\n return;\n }\n setTimeout(tryFn, nextBackoff);\n };\n tryFn();\n });\n}\nfunction inferFromImplicitShape(shape, size) {\n let shapeProd = 1;\n let implicitIdx = -1;\n for (let i2 = 0; i2 < shape.length; ++i2) {\n if (shape[i2] >= 0) {\n shapeProd *= shape[i2];\n } else if (shape[i2] === -1) {\n if (implicitIdx !== -1) {\n throw Error(`Shapes can only have 1 implicit size. Found -1 at dim ${implicitIdx} and dim ${i2}`);\n }\n implicitIdx = i2;\n } else if (shape[i2] < 0) {\n throw Error(`Shapes can not be < 0. 
Found ${shape[i2]} at dim ${i2}`);\n }\n }\n if (implicitIdx === -1) {\n if (size > 0 && size !== shapeProd) {\n throw Error(`Size(${size}) must match the product of shape ${shape}`);\n }\n return shape;\n }\n if (shapeProd === 0) {\n throw Error(`Cannot infer the missing size in [${shape}] when there are 0 elements`);\n }\n if (size % shapeProd !== 0) {\n throw Error(`The implicit shape can't be a fractional number. Got ${size} / ${shapeProd}`);\n }\n const newShape = shape.slice();\n newShape[implicitIdx] = size / shapeProd;\n return newShape;\n}\nfunction parseAxisParam(axis, shape) {\n const rank = shape.length;\n axis = axis == null ? shape.map((s2, i2) => i2) : [].concat(axis);\n assert(axis.every((ax) => ax >= -rank && ax < rank), () => `All values in axis param must be in range [-${rank}, ${rank}) but got axis ${axis}`);\n assert(axis.every((ax) => isInt(ax)), () => `All values in axis param must be integers but got axis ${axis}`);\n return axis.map((a) => a < 0 ? rank + a : a);\n}\nfunction squeezeShape(shape, axis) {\n const newShape = [];\n const keptDims = [];\n const isEmptyArray = axis != null && Array.isArray(axis) && axis.length === 0;\n const axes = axis == null || isEmptyArray ? null : parseAxisParam(axis, shape).sort();\n let j = 0;\n for (let i2 = 0; i2 < shape.length; ++i2) {\n if (axes != null) {\n if (axes[j] === i2 && shape[i2] !== 1) {\n throw new Error(`Can't squeeze axis ${i2} since its dim '${shape[i2]}' is not 1`);\n }\n if ((axes[j] == null || axes[j] > i2) && shape[i2] === 1) {\n newShape.push(shape[i2]);\n keptDims.push(i2);\n }\n if (axes[j] <= i2) {\n j++;\n }\n }\n if (shape[i2] !== 1) {\n newShape.push(shape[i2]);\n keptDims.push(i2);\n }\n }\n return { newShape, keptDims };\n}\nfunction getTypedArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === \"float32\") {\n values = new Float32Array(size);\n } else if (dtype === \"int32\") {\n values = new Int32Array(size);\n } else if (dtype === \"bool\") {\n values = new Uint8Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nfunction getArrayFromDType(dtype, size) {\n let values = null;\n if (dtype == null || dtype === \"float32\") {\n values = new Float32Array(size);\n } else if (dtype === \"int32\") {\n values = new Int32Array(size);\n } else if (dtype === \"bool\") {\n values = new Uint8Array(size);\n } else if (dtype === \"string\") {\n values = new Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n return values;\n}\nfunction checkConversionForErrors(vals, dtype) {\n for (let i2 = 0; i2 < vals.length; i2++) {\n const num = vals[i2];\n if (isNaN(num) || !isFinite(num)) {\n throw Error(`A tensor of type ${dtype} being uploaded contains ${num}.`);\n }\n }\n}\nfunction isValidDtype(dtype) {\n return dtype === \"bool\" || dtype === \"complex64\" || dtype === \"float32\" || dtype === \"int32\" || dtype === \"string\";\n}\nfunction hasEncodingLoss(oldType, newType) {\n if (newType === \"complex64\") {\n return false;\n }\n if (newType === \"float32\" && oldType !== \"complex64\") {\n return false;\n }\n if (newType === \"int32\" && oldType !== \"float32\" && oldType !== \"complex64\") {\n return false;\n }\n if (newType === \"bool\" && oldType === \"bool\") {\n return false;\n }\n return true;\n}\nfunction isTypedArray(a) {\n return a instanceof Float32Array || a instanceof Int32Array || a instanceof Uint8Array || a instanceof Uint8ClampedArray;\n}\nfunction bytesPerElement(dtype) {\n if (dtype === 
\"float32\" || dtype === \"int32\") {\n return 4;\n } else if (dtype === \"complex64\") {\n return 8;\n } else if (dtype === \"bool\") {\n return 1;\n } else {\n throw new Error(`Unknown dtype ${dtype}`);\n }\n}\nfunction bytesFromStringArray(arr) {\n if (arr == null) {\n return 0;\n }\n let bytes = 0;\n arr.forEach((x) => bytes += x.length);\n return bytes;\n}\nfunction isString(value) {\n return typeof value === \"string\" || value instanceof String;\n}\nfunction isBoolean(value) {\n return typeof value === \"boolean\";\n}\nfunction isNumber(value) {\n return typeof value === \"number\";\n}\nfunction inferDtype(values) {\n if (Array.isArray(values)) {\n return inferDtype(values[0]);\n }\n if (values instanceof Float32Array) {\n return \"float32\";\n } else if (values instanceof Int32Array || values instanceof Uint8Array || values instanceof Uint8ClampedArray) {\n return \"int32\";\n } else if (isNumber(values)) {\n return \"float32\";\n } else if (isString(values)) {\n return \"string\";\n } else if (isBoolean(values)) {\n return \"bool\";\n }\n return \"float32\";\n}\nfunction isFunction(f) {\n return !!(f && f.constructor && f.call && f.apply);\n}\nfunction nearestDivisor(size, start) {\n for (let i2 = start; i2 < size; ++i2) {\n if (size % i2 === 0) {\n return i2;\n }\n }\n return size;\n}\nfunction computeStrides(shape) {\n const rank = shape.length;\n if (rank < 2) {\n return [];\n }\n const strides = new Array(rank - 1);\n strides[rank - 2] = shape[rank - 1];\n for (let i2 = rank - 3; i2 >= 0; --i2) {\n strides[i2] = strides[i2 + 1] * shape[i2 + 1];\n }\n return strides;\n}\nfunction createNestedArray(offset, shape, a, isComplex = false) {\n const ret = new Array();\n if (shape.length === 1) {\n const d = shape[0] * (isComplex ? 2 : 1);\n for (let i2 = 0; i2 < d; i2++) {\n ret[i2] = a[offset + i2];\n }\n } else {\n const d = shape[0];\n const rest = shape.slice(1);\n const len = rest.reduce((acc, c) => acc * c) * (isComplex ? 2 : 1);\n for (let i2 = 0; i2 < d; i2++) {\n ret[i2] = createNestedArray(offset + i2 * len, rest, a, isComplex);\n }\n }\n return ret;\n}\nfunction toNestedArray(shape, a, isComplex = false) {\n if (shape.length === 0) {\n return a[0];\n }\n const size = shape.reduce((acc, c) => acc * c) * (isComplex ? 2 : 1);\n if (size === 0) {\n return [];\n }\n if (size !== a.length) {\n throw new Error(`[${shape}] does not match the input size ${a.length}${isComplex ? 
\" for a complex tensor\" : \"\"}.`);\n }\n return createNestedArray(0, shape, a, isComplex);\n}\nfunction makeOnesTypedArray(size, dtype) {\n const array2 = makeZerosTypedArray(size, dtype);\n for (let i2 = 0; i2 < array2.length; i2++) {\n array2[i2] = 1;\n }\n return array2;\n}\nfunction makeZerosTypedArray(size, dtype) {\n if (dtype == null || dtype === \"float32\" || dtype === \"complex64\") {\n return new Float32Array(size);\n } else if (dtype === \"int32\") {\n return new Int32Array(size);\n } else if (dtype === \"bool\") {\n return new Uint8Array(size);\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nfunction makeZerosNestedTypedArray(shape, dtype) {\n const size = shape.reduce((prev, curr) => prev * curr, 1);\n if (dtype == null || dtype === \"float32\") {\n return toNestedArray(shape, new Float32Array(size));\n } else if (dtype === \"int32\") {\n return toNestedArray(shape, new Int32Array(size));\n } else if (dtype === \"bool\") {\n return toNestedArray(shape, new Uint8Array(size));\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nfunction assertNonNegativeIntegerDimensions(shape) {\n shape.forEach((dimSize) => {\n assert(Number.isInteger(dimSize) && dimSize >= 0, () => `Tensor must have a shape comprised of positive integers but got shape [${shape}].`);\n });\n}\nfunction locToIndex(locs, rank, strides) {\n if (rank === 0) {\n return 0;\n } else if (rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i2 = 0; i2 < locs.length - 1; ++i2) {\n index += strides[i2] * locs[i2];\n }\n return index;\n}\nfunction indexToLoc(index, rank, strides) {\n if (rank === 0) {\n return [];\n } else if (rank === 1) {\n return [index];\n }\n const locs = new Array(rank);\n for (let i2 = 0; i2 < locs.length - 1; ++i2) {\n locs[i2] = Math.floor(index / strides[i2]);\n index -= locs[i2] * strides[i2];\n }\n locs[locs.length - 1] = index;\n return locs;\n}\nfunction isPromise(object) {\n return object && object.then && typeof object.then === \"function\";\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/environment.js\nvar TENSORFLOWJS_FLAGS_PREFIX = \"tfjsflags\";\nvar Environment = class {\n constructor(global2) {\n this.global = global2;\n this.flags = {};\n this.flagRegistry = {};\n this.urlFlags = {};\n this.getQueryParams = getQueryParams;\n this.populateURLFlags();\n }\n setPlatform(platformName, platform) {\n if (this.platform != null) {\n if (!(env().getBool(\"IS_TEST\") || env().getBool(\"PROD\"))) {\n console.warn(`Platform ${this.platformName} has already been set. 
Overwriting the platform with ${platformName}.`);\n }\n }\n this.platformName = platformName;\n this.platform = platform;\n }\n registerFlag(flagName, evaluationFn, setHook) {\n this.flagRegistry[flagName] = { evaluationFn, setHook };\n if (this.urlFlags[flagName] != null) {\n const flagValue = this.urlFlags[flagName];\n if (!(env().getBool(\"IS_TEST\") || env().getBool(\"PROD\"))) {\n console.warn(`Setting feature override from URL ${flagName}: ${flagValue}.`);\n }\n this.set(flagName, flagValue);\n }\n }\n async getAsync(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n this.flags[flagName] = await this.evaluateFlag(flagName);\n return this.flags[flagName];\n }\n get(flagName) {\n if (flagName in this.flags) {\n return this.flags[flagName];\n }\n const flagValue = this.evaluateFlag(flagName);\n if (isPromise(flagValue)) {\n throw new Error(`Flag ${flagName} cannot be synchronously evaluated. Please use getAsync() instead.`);\n }\n this.flags[flagName] = flagValue;\n return this.flags[flagName];\n }\n getNumber(flagName) {\n return this.get(flagName);\n }\n getBool(flagName) {\n return this.get(flagName);\n }\n getFlags() {\n return this.flags;\n }\n get features() {\n return this.flags;\n }\n set(flagName, value) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot set flag ${flagName} as it has not been registered.`);\n }\n this.flags[flagName] = value;\n if (this.flagRegistry[flagName].setHook != null) {\n this.flagRegistry[flagName].setHook(value);\n }\n }\n evaluateFlag(flagName) {\n if (this.flagRegistry[flagName] == null) {\n throw new Error(`Cannot evaluate flag '${flagName}': no evaluation function found.`);\n }\n return this.flagRegistry[flagName].evaluationFn();\n }\n setFlags(flags) {\n this.flags = Object.assign({}, flags);\n }\n reset() {\n this.flags = {};\n this.urlFlags = {};\n this.populateURLFlags();\n }\n populateURLFlags() {\n if (typeof this.global === \"undefined\" || typeof this.global.location === \"undefined\" || typeof this.global.location.search === \"undefined\") {\n return;\n }\n const urlParams = this.getQueryParams(this.global.location.search);\n if (TENSORFLOWJS_FLAGS_PREFIX in urlParams) {\n const keyValues = urlParams[TENSORFLOWJS_FLAGS_PREFIX].split(\",\");\n keyValues.forEach((keyValue) => {\n const [key, value] = keyValue.split(\":\");\n this.urlFlags[key] = parseValue(key, value);\n });\n }\n }\n};\nfunction getQueryParams(queryString) {\n const params = {};\n queryString.replace(/[?&]([^=?&]+)(?:=([^&]*))?/g, (s2, ...t2) => {\n decodeParam(params, t2[0], t2[1]);\n return t2.join(\"=\");\n });\n return params;\n}\nfunction decodeParam(params, name, value) {\n params[decodeURIComponent(name)] = decodeURIComponent(value || \"\");\n}\nfunction parseValue(flagName, value) {\n value = value.toLowerCase();\n if (value === \"true\" || value === \"false\") {\n return value === \"true\";\n } else if (`${+value}` === value) {\n return +value;\n }\n throw new Error(`Could not parse value flag value ${value} for flag ${flagName}.`);\n}\nfunction env() {\n return ENV;\n}\nvar ENV = null;\nfunction setEnvironmentGlobal(environment) {\n ENV = environment;\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/global_util.js\nvar globalNameSpace;\nfunction getGlobalNamespace() {\n if (globalNameSpace == null) {\n let ns;\n if (typeof window !== \"undefined\") {\n ns = window;\n } else if (typeof global !== \"undefined\") {\n ns = global;\n } else if (typeof process !== 
\"undefined\") {\n ns = process;\n } else if (typeof self !== \"undefined\") {\n ns = self;\n } else {\n throw new Error(\"Could not find a global object\");\n }\n globalNameSpace = ns;\n }\n return globalNameSpace;\n}\nfunction getGlobalMap() {\n const ns = getGlobalNamespace();\n if (ns._tfGlobals == null) {\n ns._tfGlobals = /* @__PURE__ */ new Map();\n }\n return ns._tfGlobals;\n}\nfunction getGlobal(key, init2) {\n const globalMap = getGlobalMap();\n if (globalMap.has(key)) {\n return globalMap.get(key);\n } else {\n const singleton = init2();\n globalMap.set(key, singleton);\n return globalMap.get(key);\n }\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/kernel_names.js\nvar Abs = \"Abs\";\nvar Acos = \"Acos\";\nvar Acosh = \"Acosh\";\nvar Add = \"Add\";\nvar AddN = \"AddN\";\nvar All = \"All\";\nvar Any = \"Any\";\nvar ArgMax = \"ArgMax\";\nvar ArgMin = \"ArgMin\";\nvar Asin = \"Asin\";\nvar Asinh = \"Asinh\";\nvar Atan = \"Atan\";\nvar Atanh = \"Atanh\";\nvar Atan2 = \"Atan2\";\nvar AvgPool = \"AvgPool\";\nvar AvgPoolGrad = \"AvgPoolGrad\";\nvar AvgPool3D = \"AvgPool3D\";\nvar AvgPool3DGrad = \"AvgPool3DGrad\";\nvar BatchMatMul = \"BatchMatMul\";\nvar BatchToSpaceND = \"BatchToSpaceND\";\nvar Bincount = \"Bincount\";\nvar BroadcastTo = \"BroadcastTo\";\nvar BroadcastArgs = \"BroadcastArgs\";\nvar Cast = \"Cast\";\nvar Ceil = \"Ceil\";\nvar ClipByValue = \"ClipByValue\";\nvar Complex = \"Complex\";\nvar ComplexAbs = \"ComplexAbs\";\nvar Concat = \"Concat\";\nvar Conv2D = \"Conv2D\";\nvar Conv2DBackpropFilter = \"Conv2DBackpropFilter\";\nvar Conv2DBackpropInput = \"Conv2DBackpropInput\";\nvar Conv3D = \"Conv3D\";\nvar Conv3DBackpropFilterV2 = \"Conv3DBackpropFilterV2\";\nvar Conv3DBackpropInputV2 = \"Conv3DBackpropInputV2\";\nvar Cos = \"Cos\";\nvar Cosh = \"Cosh\";\nvar Cumprod = \"Cumprod\";\nvar Cumsum = \"Cumsum\";\nvar CropAndResize = \"CropAndResize\";\nvar DenseBincount = \"DenseBincount\";\nvar DepthToSpace = \"DepthToSpace\";\nvar DepthwiseConv2dNative = \"DepthwiseConv2dNative\";\nvar DepthwiseConv2dNativeBackpropFilter = \"DepthwiseConv2dNativeBackpropFilter\";\nvar DepthwiseConv2dNativeBackpropInput = \"DepthwiseConv2dNativeBackpropInput\";\nvar Diag = \"Diag\";\nvar Dilation2D = \"Dilation2D\";\nvar Dilation2DBackpropInput = \"Dilation2DBackpropInput\";\nvar Dilation2DBackpropFilter = \"Dilation2DBackpropFilter\";\nvar RealDiv = \"RealDiv\";\nvar Einsum = \"Einsum\";\nvar Elu = \"Elu\";\nvar EluGrad = \"EluGrad\";\nvar Erf = \"Erf\";\nvar Equal = \"Equal\";\nvar Exp = \"Exp\";\nvar ExpandDims = \"ExpandDims\";\nvar Expm1 = \"Expm1\";\nvar FFT = \"FFT\";\nvar Fill = \"Fill\";\nvar FlipLeftRight = \"FlipLeftRight\";\nvar Floor = \"Floor\";\nvar FloorDiv = \"FloorDiv\";\nvar FusedBatchNorm = \"FusedBatchNorm\";\nvar GatherV2 = \"GatherV2\";\nvar GatherNd = \"GatherNd\";\nvar Greater = \"Greater\";\nvar GreaterEqual = \"GreaterEqual\";\nvar Identity = \"Identity\";\nvar IFFT = \"IFFT\";\nvar Imag = \"Imag\";\nvar IsFinite = \"IsFinite\";\nvar IsInf = \"IsInf\";\nvar IsNan = \"IsNan\";\nvar LeakyRelu = \"LeakyRelu\";\nvar Less = \"Less\";\nvar LessEqual = \"LessEqual\";\nvar LinSpace = \"LinSpace\";\nvar Log = \"Log\";\nvar Log1p = \"Log1p\";\nvar LogicalAnd = \"LogicalAnd\";\nvar LogicalNot = \"LogicalNot\";\nvar LogicalOr = \"LogicalOr\";\nvar LogicalXor = \"LogicalXor\";\nvar LogSoftmax = \"LogSoftmax\";\nvar LowerBound = \"LowerBound\";\nvar LRN = \"LRN\";\nvar LRNGrad = \"LRNGrad\";\nvar Max = \"Max\";\nvar Maximum = 
\"Maximum\";\nvar MaxPool = \"MaxPool\";\nvar MaxPoolGrad = \"MaxPoolGrad\";\nvar MaxPool3D = \"MaxPool3D\";\nvar MaxPool3DGrad = \"MaxPool3DGrad\";\nvar MaxPoolWithArgmax = \"MaxPoolWithArgmax\";\nvar Mean = \"Mean\";\nvar Min = \"Min\";\nvar Minimum = \"Minimum\";\nvar MirrorPad = \"MirrorPad\";\nvar Mod = \"Mod\";\nvar Multinomial = \"Multinomial\";\nvar Multiply = \"Multiply\";\nvar Neg = \"Neg\";\nvar NotEqual = \"NotEqual\";\nvar NonMaxSuppressionV3 = \"NonMaxSuppressionV3\";\nvar NonMaxSuppressionV4 = \"NonMaxSuppressionV4\";\nvar NonMaxSuppressionV5 = \"NonMaxSuppressionV5\";\nvar OnesLike = \"OnesLike\";\nvar OneHot = \"OneHot\";\nvar Pack = \"Pack\";\nvar PadV2 = \"PadV2\";\nvar Pool = \"Pool\";\nvar Pow = \"Pow\";\nvar Prelu = \"Prelu\";\nvar Prod = \"Prod\";\nvar Range = \"Range\";\nvar Real = \"Real\";\nvar Reciprocal = \"Reciprocal\";\nvar Relu = \"Relu\";\nvar Reshape = \"Reshape\";\nvar ResizeNearestNeighbor = \"ResizeNearestNeighbor\";\nvar ResizeNearestNeighborGrad = \"ResizeNearestNeighborGrad\";\nvar ResizeBilinear = \"ResizeBilinear\";\nvar ResizeBilinearGrad = \"ResizeBilinearGrad\";\nvar Relu6 = \"Relu6\";\nvar Reverse = \"Reverse\";\nvar Round = \"Round\";\nvar Rsqrt = \"Rsqrt\";\nvar ScatterNd = \"ScatterNd\";\nvar SearchSorted = \"SearchSorted\";\nvar Select = \"Select\";\nvar Selu = \"Selu\";\nvar Slice = \"Slice\";\nvar Sin = \"Sin\";\nvar Sinh = \"Sinh\";\nvar Sign = \"Sign\";\nvar Sigmoid = \"Sigmoid\";\nvar Softplus = \"Softplus\";\nvar Sqrt = \"Sqrt\";\nvar Sum = \"Sum\";\nvar SpaceToBatchND = \"SpaceToBatchND\";\nvar SplitV = \"SplitV\";\nvar Softmax = \"Softmax\";\nvar SparseFillEmptyRows = \"SparseFillEmptyRows\";\nvar SparseReshape = \"SparseReshape\";\nvar SparseSegmentMean = \"SparseSegmentMean\";\nvar SparseSegmentSum = \"SparseSegmentSum\";\nvar SparseToDense = \"SparseToDense\";\nvar SquaredDifference = \"SquaredDifference\";\nvar Square = \"Square\";\nvar StridedSlice = \"StridedSlice\";\nvar StringNGrams = \"StringNGrams\";\nvar StringSplit = \"StringSplit\";\nvar StringToHashBucketFast = \"StringToHashBucketFast\";\nvar Sub = \"Sub\";\nvar Tan = \"Tan\";\nvar Tanh = \"Tanh\";\nvar Tile = \"Tile\";\nvar TopK = \"TopK\";\nvar Transform = \"Transform\";\nvar Transpose = \"Transpose\";\nvar Unique = \"Unique\";\nvar Unpack = \"Unpack\";\nvar UnsortedSegmentSum = \"UnsortedSegmentSum\";\nvar UpperBound = \"UpperBound\";\nvar ZerosLike = \"ZerosLike\";\nvar Step = \"Step\";\nvar FromPixels = \"FromPixels\";\nvar RotateWithOffset = \"RotateWithOffset\";\nvar _FusedMatMul = \"_FusedMatMul\";\nvar FusedConv2D = \"FusedConv2D\";\nvar FusedDepthwiseConv2D = \"FusedDepthwiseConv2D\";\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/log.js\nfunction warn(...msg) {\n if (!(env().getBool(\"IS_TEST\") || env().getBool(\"PROD\"))) {\n console.warn(...msg);\n }\n}\nfunction log(...msg) {\n if (!(env().getBool(\"IS_TEST\") || env().getBool(\"PROD\"))) {\n console.log(...msg);\n }\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/kernel_registry.js\nvar kernelRegistry = getGlobal(\"kernelRegistry\", () => /* @__PURE__ */ new Map());\nvar gradRegistry = getGlobal(\"gradRegistry\", () => /* @__PURE__ */ new Map());\nfunction getKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n return kernelRegistry.get(key);\n}\nfunction getGradient(kernelName) {\n return gradRegistry.get(kernelName);\n}\nfunction getKernelsForBackend(backendName) {\n const it 
= kernelRegistry.entries();\n const result = [];\n while (true) {\n const { done, value } = it.next();\n if (done) {\n break;\n }\n const [key, config] = value;\n const [backend2] = key.split(\"_\");\n if (backend2 === backendName) {\n result.push(config);\n }\n }\n return result;\n}\nfunction registerKernel(config) {\n const { kernelName, backendName } = config;\n const key = makeKey(kernelName, backendName);\n if (kernelRegistry.has(key)) {\n warn(`The kernel '${kernelName}' for backend '${backendName}' is already registered`);\n }\n kernelRegistry.set(key, config);\n}\nfunction registerGradient(config) {\n const { kernelName } = config;\n if (gradRegistry.has(kernelName)) {\n if (env().getBool(\"DEBUG\")) {\n warn(`Overriding the gradient for '${kernelName}'`);\n }\n }\n gradRegistry.set(kernelName, config);\n}\nfunction unregisterKernel(kernelName, backendName) {\n const key = makeKey(kernelName, backendName);\n if (!kernelRegistry.has(key)) {\n throw new Error(`The kernel '${kernelName}' for backend '${backendName}' is not registered`);\n }\n kernelRegistry.delete(key);\n}\nfunction unregisterGradient(kernelName) {\n if (!gradRegistry.has(kernelName)) {\n throw new Error(`The gradient '${kernelName}' for backend is not registered`);\n }\n gradRegistry.delete(kernelName);\n}\nfunction copyRegisteredKernels(registeredBackendName, newBackendName) {\n const kernels = getKernelsForBackend(registeredBackendName);\n kernels.forEach((kernelConfig) => {\n const newKernelConfig = Object.assign({}, kernelConfig, { backendName: newBackendName });\n registerKernel(newKernelConfig);\n });\n}\nfunction makeKey(kernelName, backendName) {\n return `${backendName}_${kernelName}`;\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/util.js\nvar util_exports = {};\n__export(util_exports, {\n arraysEqual: () => arraysEqual,\n assert: () => assert,\n assertNonNegativeIntegerDimensions: () => assertNonNegativeIntegerDimensions,\n assertNonNull: () => assertNonNull,\n assertShapesMatch: () => assertShapesMatch,\n bytesFromStringArray: () => bytesFromStringArray,\n bytesPerElement: () => bytesPerElement,\n checkConversionForErrors: () => checkConversionForErrors,\n clamp: () => clamp,\n computeStrides: () => computeStrides,\n createScalarValue: () => createScalarValue,\n createShuffledIndices: () => createShuffledIndices,\n decodeString: () => decodeString,\n distSquared: () => distSquared,\n encodeString: () => encodeString,\n fetch: () => fetch3,\n fingerPrint64: () => fingerPrint64,\n flatten: () => flatten,\n getArrayFromDType: () => getArrayFromDType,\n getTypedArrayFromDType: () => getTypedArrayFromDType,\n hasEncodingLoss: () => hasEncodingLoss,\n hexToLong: () => hexToLong,\n indexToLoc: () => indexToLoc,\n inferDtype: () => inferDtype,\n inferFromImplicitShape: () => inferFromImplicitShape,\n isBoolean: () => isBoolean,\n isFunction: () => isFunction,\n isInt: () => isInt,\n isNumber: () => isNumber,\n isPromise: () => isPromise,\n isScalarShape: () => isScalarShape,\n isString: () => isString,\n isTypedArray: () => isTypedArray,\n isValidDtype: () => isValidDtype,\n locToIndex: () => locToIndex,\n makeOnesTypedArray: () => makeOnesTypedArray,\n makeZerosNestedTypedArray: () => makeZerosNestedTypedArray,\n makeZerosTypedArray: () => makeZerosTypedArray,\n nearestDivisor: () => nearestDivisor,\n nearestLargerEven: () => nearestLargerEven,\n now: () => now,\n parseAxisParam: () => parseAxisParam,\n randUniform: () => randUniform,\n repeatedTry: () => 
repeatedTry,\n rightPad: () => rightPad,\n shuffle: () => shuffle,\n shuffleCombo: () => shuffleCombo,\n sizeFromShape: () => sizeFromShape,\n sizeToSquarishShape: () => sizeToSquarishShape,\n squeezeShape: () => squeezeShape,\n sum: () => sum,\n swap: () => swap,\n tanh: () => tanh,\n toNestedArray: () => toNestedArray,\n toTypedArray: () => toTypedArray\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/hash_util.js\nvar LongExports = __toESM(require_long());\nvar Long = LongExports.default || LongExports;\nfunction hexToLong(hex) {\n return Long.fromString(hex, true, 16);\n}\nvar k0 = hexToLong(\"c3a5c85c97cb3127\");\nvar k1 = hexToLong(\"b492b66fbe98f273\");\nvar k2 = hexToLong(\"9ae16a3b2f90404f\");\nfunction shiftMix(val) {\n return val.xor(val.shru(47));\n}\nfunction fetch2(s2, offset, numBytes) {\n const bytes = s2.slice(offset, offset + numBytes);\n return Long.fromBytes(Array.from(bytes), true, true);\n}\nfunction fetch64(s2, offset) {\n return fetch2(s2, offset, 8);\n}\nfunction fetch32(s2, offset) {\n return fetch2(s2, offset, 4);\n}\nfunction rotate64(val, shift) {\n return shift === 0 ? val : val.shru(shift).or(val.shl(64 - shift));\n}\nfunction hashLen16(u, v, mul2 = hexToLong(\"9ddfea08eb382d69\")) {\n let a = u.xor(v).mul(mul2);\n a = a.xor(a.shru(47));\n let b = v.xor(a).mul(mul2);\n b = b.xor(b.shru(47));\n b = b.mul(mul2);\n return b;\n}\nfunction weakHashLen32WithSeeds(w, x, y, z, a, b) {\n a = a.add(w);\n b = rotate64(b.add(a).add(z), 21);\n const c = a;\n a = a.add(x);\n a = a.add(y);\n b = b.add(rotate64(a, 44));\n return [a.add(z), b.add(c)];\n}\nfunction weakHashLen32WithSeedsStr(s2, offset, a, b) {\n return weakHashLen32WithSeeds(fetch64(s2, offset), fetch64(s2, offset + 8), fetch64(s2, offset + 16), fetch64(s2, offset + 24), a, b);\n}\nfunction hashLen0to16(s2, len = s2.length) {\n if (len >= 8) {\n const mul2 = k2.add(len * 2);\n const a = fetch64(s2, 0).add(k2);\n const b = fetch64(s2, len - 8);\n const c = rotate64(b, 37).mul(mul2).add(a);\n const d = rotate64(a, 25).add(b).mul(mul2);\n return hashLen16(c, d, mul2);\n }\n if (len >= 4) {\n const mul2 = k2.add(len * 2);\n const a = fetch32(s2, 0);\n return hashLen16(a.shl(3).add(len), fetch32(s2, len - 4), mul2);\n }\n if (len > 0) {\n const a = s2[0];\n const b = s2[len >> 1];\n const c = s2[len - 1];\n const y = a + (b << 8);\n const z = len + (c << 2);\n return shiftMix(k2.mul(y).xor(k0.mul(z))).mul(k2);\n }\n return k2;\n}\nfunction hashLen17to32(s2, len = s2.length) {\n const mul2 = k2.add(len * 2);\n const a = fetch64(s2, 0).mul(k1);\n const b = fetch64(s2, 8);\n const c = fetch64(s2, len - 8).mul(mul2);\n const d = fetch64(s2, len - 16).mul(k2);\n return hashLen16(rotate64(a.add(b), 43).add(rotate64(c, 30)).add(d), a.add(rotate64(b.add(k2), 18)).add(c), mul2);\n}\nfunction hashLen33to64(s2, len = s2.length) {\n const mul2 = k2.add(len * 2);\n const a = fetch64(s2, 0).mul(k2);\n const b = fetch64(s2, 8);\n const c = fetch64(s2, len - 8).mul(mul2);\n const d = fetch64(s2, len - 16).mul(k2);\n const y = rotate64(a.add(b), 43).add(rotate64(c, 30)).add(d);\n const z = hashLen16(y, a.add(rotate64(b.add(k2), 18)).add(c), mul2);\n const e2 = fetch64(s2, 16).mul(mul2);\n const f = fetch64(s2, 24);\n const g = y.add(fetch64(s2, len - 32)).mul(mul2);\n const h = z.add(fetch64(s2, len - 24)).mul(mul2);\n return hashLen16(rotate64(e2.add(f), 43).add(rotate64(g, 30)).add(h), e2.add(rotate64(f.add(a), 18)).add(g), mul2);\n}\nfunction fingerPrint64(s2, len = s2.length) 
{\n const seed = Long.fromNumber(81, true);\n if (len <= 32) {\n if (len <= 16) {\n return hashLen0to16(s2, len);\n } else {\n return hashLen17to32(s2, len);\n }\n } else if (len <= 64) {\n return hashLen33to64(s2, len);\n }\n let x = seed;\n let y = seed.mul(k1).add(113);\n let z = shiftMix(y.mul(k2).add(113)).mul(k2);\n let v = [Long.UZERO, Long.UZERO];\n let w = [Long.UZERO, Long.UZERO];\n x = x.mul(k2).add(fetch64(s2, 0));\n let offset = 0;\n const end = (len - 1 >> 6) * 64;\n const last64 = end + (len - 1 & 63) - 63;\n do {\n x = rotate64(x.add(y).add(v[0]).add(fetch64(s2, offset + 8)), 37).mul(k1);\n y = rotate64(y.add(v[1]).add(fetch64(s2, offset + 48)), 42).mul(k1);\n x = x.xor(w[1]);\n y = y.add(v[0]).add(fetch64(s2, offset + 40));\n z = rotate64(z.add(w[0]), 33).mul(k1);\n v = weakHashLen32WithSeedsStr(s2, offset, v[1].mul(k1), x.add(w[0]));\n w = weakHashLen32WithSeedsStr(s2, offset + 32, z.add(w[1]), y.add(fetch64(s2, offset + 16)));\n [z, x] = [x, z];\n offset += 64;\n } while (offset !== end);\n const mul2 = k1.add(z.and(255).shl(1));\n offset = last64;\n w[0] = w[0].add(len - 1 & 63);\n v[0] = v[0].add(w[0]);\n w[0] = w[0].add(v[0]);\n x = rotate64(x.add(y).add(v[0]).add(fetch64(s2, offset + 8)), 37).mul(mul2);\n y = rotate64(y.add(v[1]).add(fetch64(s2, offset + 48)), 42).mul(mul2);\n x = x.xor(w[1].mul(9));\n y = y.add(v[0].mul(9).add(fetch64(s2, offset + 40)));\n z = rotate64(z.add(w[0]), 33).mul(mul2);\n v = weakHashLen32WithSeedsStr(s2, offset, v[1].mul(mul2), x.add(w[0]));\n w = weakHashLen32WithSeedsStr(s2, offset + 32, z.add(w[1]), y.add(fetch64(s2, offset + 16)));\n [z, x] = [x, z];\n return hashLen16(hashLen16(v[0], w[0], mul2).add(shiftMix(y).mul(k0)).add(z), hashLen16(v[1], w[1], mul2).add(x), mul2);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/util.js\nfunction createScalarValue(value, dtype) {\n if (dtype === \"string\") {\n return encodeString(value);\n }\n return toTypedArray([value], dtype);\n}\nfunction noConversionNeeded(a, dtype) {\n return a instanceof Float32Array && dtype === \"float32\" || a instanceof Int32Array && dtype === \"int32\" || a instanceof Uint8Array && dtype === \"bool\";\n}\nfunction toTypedArray(a, dtype) {\n if (dtype === \"string\") {\n throw new Error(\"Cannot convert a string[] to a TypedArray\");\n }\n if (Array.isArray(a)) {\n a = flatten(a);\n }\n if (env().getBool(\"DEBUG\")) {\n checkConversionForErrors(a, dtype);\n }\n if (noConversionNeeded(a, dtype)) {\n return a;\n }\n if (dtype == null || dtype === \"float32\" || dtype === \"complex64\") {\n return new Float32Array(a);\n } else if (dtype === \"int32\") {\n return new Int32Array(a);\n } else if (dtype === \"bool\") {\n const bool = new Uint8Array(a.length);\n for (let i2 = 0; i2 < bool.length; ++i2) {\n if (Math.round(a[i2]) !== 0) {\n bool[i2] = 1;\n }\n }\n return bool;\n } else {\n throw new Error(`Unknown data type ${dtype}`);\n }\n}\nfunction now() {\n return env().platform.now();\n}\nfunction fetch3(path, requestInits) {\n return env().platform.fetch(path, requestInits);\n}\nfunction encodeString(s2, encoding = \"utf-8\") {\n encoding = encoding || \"utf-8\";\n return env().platform.encode(s2, encoding);\n}\nfunction decodeString(bytes, encoding = \"utf-8\") {\n encoding = encoding || \"utf-8\";\n return env().platform.decode(bytes, encoding);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/profiler.js\nvar Profiler = class {\n constructor(backendTimer, logger) {\n 
this.backendTimer = backendTimer;\n this.logger = logger;\n if (logger == null) {\n this.logger = new Logger();\n }\n }\n profileKernel(kernelName, inputs, f) {\n let outputs;\n const holdResultWrapperFn = () => {\n outputs = f();\n };\n let timer;\n const start = now();\n if (this.backendTimer.timerAvailable()) {\n timer = this.backendTimer.time(holdResultWrapperFn);\n } else {\n holdResultWrapperFn();\n for (const output of outputs) {\n output.dataSync();\n }\n timer = Promise.resolve({ kernelMs: now() - start });\n }\n if (env().getBool(\"CHECK_COMPUTATION_FOR_ERRORS\")) {\n for (let i2 = 0; i2 < outputs.length; i2++) {\n const output = outputs[i2];\n output.data().then((tensorVals) => {\n checkComputationForErrors(tensorVals, output.dtype, kernelName);\n });\n }\n }\n const kernelProfile = {\n kernelName,\n outputs,\n inputs,\n timeMs: timer.then((timing) => timing.kernelMs),\n extraInfo: timer.then((timing) => timing.getExtraProfileInfo != null ? timing.getExtraProfileInfo() : \"\")\n };\n return kernelProfile;\n }\n logKernelProfile(kernelProfile) {\n const { kernelName, outputs, timeMs, inputs, extraInfo } = kernelProfile;\n outputs.forEach((result) => {\n Promise.all([result.data(), timeMs, extraInfo]).then((valueContainer) => {\n this.logger.logKernelProfile(kernelName, result, valueContainer[0], valueContainer[1], inputs, valueContainer[2]);\n });\n });\n }\n};\nfunction checkComputationForErrors(vals, dtype, kernelName) {\n if (dtype !== \"float32\") {\n return false;\n }\n for (let i2 = 0; i2 < vals.length; i2++) {\n const num = vals[i2];\n if (isNaN(num) || !isFinite(num)) {\n console.warn(`Found ${num} in the result of '${kernelName}'`);\n return true;\n }\n }\n return false;\n}\nvar Logger = class {\n logKernelProfile(name, result, vals, timeMs, inputs, extraInfo) {\n const time2 = typeof timeMs === \"number\" ? rightPad(`${timeMs}ms`, 9) : timeMs[\"error\"];\n const paddedName = rightPad(name, 25);\n const rank = result.rank;\n const size = result.size;\n const shape = rightPad(result.shape.toString(), 14);\n let inputShapesDescription = \"\";\n for (const name2 in inputs) {\n const input2 = inputs[name2];\n if (input2 != null) {\n const inputShape = input2.shape || result.shape;\n const inputRank = inputShape.length;\n inputShapesDescription += `${name2}: ${inputRank}D ${inputRank > 0 ? 
inputShape : \"\"} `;\n }\n }\n console.log(`%c${paddedName}\t%c${time2}\t%c${rank}D ${shape}\t%c${size}\t%c${inputShapesDescription}\t%c${extraInfo}`, \"font-weight:bold\", \"color:red\", \"color:blue\", \"color: orange\", \"color: green\", \"color: steelblue\");\n }\n};\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tape.js\nfunction getFilteredNodesXToY(tape, xs, y) {\n const tensorsFromX = {};\n const nodesFromX = {};\n for (let i2 = 0; i2 < xs.length; i2++) {\n tensorsFromX[xs[i2].id] = true;\n }\n for (let i2 = 0; i2 < tape.length; i2++) {\n const node = tape[i2];\n const nodeInputs = node.inputs;\n for (const inputName in nodeInputs) {\n const input2 = nodeInputs[inputName];\n let anyInputFromX = false;\n for (let j = 0; j < xs.length; j++) {\n if (tensorsFromX[input2.id]) {\n node.outputs.forEach((output) => tensorsFromX[output.id] = true);\n anyInputFromX = true;\n nodesFromX[node.id] = true;\n break;\n }\n }\n if (anyInputFromX) {\n break;\n }\n }\n }\n const tensorsLeadToY = {};\n tensorsLeadToY[y.id] = true;\n const nodesToY = {};\n for (let i2 = tape.length - 1; i2 >= 0; i2--) {\n const node = tape[i2];\n const nodeInputs = node.inputs;\n for (let j = 0; j < node.outputs.length; j++) {\n if (tensorsLeadToY[node.outputs[j].id]) {\n for (const inputName in nodeInputs) {\n tensorsLeadToY[nodeInputs[inputName].id] = true;\n nodesToY[node.id] = true;\n }\n break;\n }\n }\n }\n const filteredTape = [];\n for (let i2 = 0; i2 < tape.length; i2++) {\n const node = tape[i2];\n if (nodesFromX[node.id] && nodesToY[node.id]) {\n const prunedInputs = {};\n for (const inputName in node.inputs) {\n const nodeInput = node.inputs[inputName];\n if (tensorsFromX[nodeInput.id]) {\n prunedInputs[inputName] = nodeInput;\n }\n }\n const prunedNode = Object.assign({}, node);\n prunedNode.inputs = prunedInputs;\n prunedNode.outputs = node.outputs;\n filteredTape.push(prunedNode);\n }\n }\n return filteredTape;\n}\nfunction backpropagateGradients(tensorAccumulatedGradientMap, filteredTape, tidy2, add5) {\n for (let i2 = filteredTape.length - 1; i2 >= 0; i2--) {\n const node = filteredTape[i2];\n const dys = [];\n node.outputs.forEach((o) => {\n const gradTensor = tensorAccumulatedGradientMap[o.id];\n if (gradTensor != null) {\n dys.push(gradTensor);\n } else {\n dys.push(null);\n }\n });\n if (node.gradient == null) {\n throw new Error(`Cannot compute gradient: gradient function not found for ${node.kernelName}.`);\n }\n const inputGradients = node.gradient(dys);\n for (const inputName in node.inputs) {\n if (!(inputName in inputGradients)) {\n throw new Error(`Cannot backprop through input ${inputName}. Available gradients found: ${Object.keys(inputGradients)}.`);\n }\n const dx = tidy2(() => inputGradients[inputName]());\n if (dx.dtype !== \"float32\") {\n throw new Error(`Error in gradient for op ${node.kernelName}. The gradient of input ${inputName} must have 'float32' dtype, but has '${dx.dtype}'`);\n }\n const x = node.inputs[inputName];\n if (!arraysEqual(dx.shape, x.shape)) {\n throw new Error(`Error in gradient for op ${node.kernelName}. 
The gradient of input '${inputName}' has shape '${dx.shape}', which does not match the shape of the input '${x.shape}'`);\n }\n if (tensorAccumulatedGradientMap[x.id] == null) {\n tensorAccumulatedGradientMap[x.id] = dx;\n } else {\n const curGradient = tensorAccumulatedGradientMap[x.id];\n tensorAccumulatedGradientMap[x.id] = add5(curGradient, dx);\n curGradient.dispose();\n }\n }\n }\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tensor_format.js\nvar FORMAT_LIMIT_NUM_VALS = 20;\nvar FORMAT_NUM_FIRST_LAST_VALS = 3;\nvar FORMAT_NUM_SIG_DIGITS = 7;\nfunction tensorToString(vals, shape, dtype, verbose) {\n const strides = computeStrides(shape);\n const padPerCol = computeMaxSizePerColumn(vals, shape, dtype, strides);\n const rank = shape.length;\n const valsLines = subTensorToString(vals, shape, dtype, strides, padPerCol);\n const lines = [\"Tensor\"];\n if (verbose) {\n lines.push(` dtype: ${dtype}`);\n lines.push(` rank: ${rank}`);\n lines.push(` shape: [${shape}]`);\n lines.push(` values:`);\n }\n lines.push(valsLines.map((l3) => \" \" + l3).join(\"\\n\"));\n return lines.join(\"\\n\");\n}\nfunction computeMaxSizePerColumn(vals, shape, dtype, strides) {\n const n2 = sizeFromShape(shape);\n const numCols = strides[strides.length - 1];\n const padPerCol = new Array(numCols).fill(0);\n const rank = shape.length;\n const valuesOrTuples = dtype === \"complex64\" ? createComplexTuples(vals) : vals;\n if (rank > 1) {\n for (let row = 0; row < n2 / numCols; row++) {\n const offset = row * numCols;\n for (let j = 0; j < numCols; j++) {\n padPerCol[j] = Math.max(padPerCol[j], valToString(valuesOrTuples[offset + j], 0, dtype).length);\n }\n }\n }\n return padPerCol;\n}\nfunction valToString(val, pad3, dtype) {\n let valStr;\n if (Array.isArray(val)) {\n valStr = `${parseFloat(val[0].toFixed(FORMAT_NUM_SIG_DIGITS))} + ${parseFloat(val[1].toFixed(FORMAT_NUM_SIG_DIGITS))}j`;\n } else if (isString(val)) {\n valStr = `'${val}'`;\n } else if (dtype === \"bool\") {\n valStr = boolNumToString(val);\n } else {\n valStr = parseFloat(val.toFixed(FORMAT_NUM_SIG_DIGITS)).toString();\n }\n return rightPad(valStr, pad3);\n}\nfunction boolNumToString(v) {\n return v === 0 ? \"false\" : \"true\";\n}\nfunction subTensorToString(vals, shape, dtype, strides, padPerCol, isLast = true) {\n const storagePerElement = dtype === \"complex64\" ? 2 : 1;\n const size = shape[0];\n const rank = shape.length;\n if (rank === 0) {\n if (dtype === \"complex64\") {\n const complexTuple = createComplexTuples(vals);\n return [valToString(complexTuple[0], 0, dtype)];\n }\n if (dtype === \"bool\") {\n return [boolNumToString(vals[0])];\n }\n return [vals[0].toString()];\n }\n if (rank === 1) {\n if (size > FORMAT_LIMIT_NUM_VALS) {\n const firstValsSize = FORMAT_NUM_FIRST_LAST_VALS * storagePerElement;\n let firstVals = Array.from(vals.slice(0, firstValsSize));\n let lastVals = Array.from(vals.slice((size - FORMAT_NUM_FIRST_LAST_VALS) * storagePerElement, size * storagePerElement));\n if (dtype === \"complex64\") {\n firstVals = createComplexTuples(firstVals);\n lastVals = createComplexTuples(lastVals);\n }\n return [\n \"[\" + firstVals.map((x, i2) => valToString(x, padPerCol[i2], dtype)).join(\", \") + \", ..., \" + lastVals.map((x, i2) => valToString(x, padPerCol[size - FORMAT_NUM_FIRST_LAST_VALS + i2], dtype)).join(\", \") + \"]\"\n ];\n }\n const displayVals = dtype === \"complex64\" ? 
createComplexTuples(vals) : Array.from(vals);\n return [\n \"[\" + displayVals.map((x, i2) => valToString(x, padPerCol[i2], dtype)).join(\", \") + \"]\"\n ];\n }\n const subshape = shape.slice(1);\n const substrides = strides.slice(1);\n const stride = strides[0] * storagePerElement;\n const lines = [];\n if (size > FORMAT_LIMIT_NUM_VALS) {\n for (let i2 = 0; i2 < FORMAT_NUM_FIRST_LAST_VALS; i2++) {\n const start = i2 * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, false));\n }\n lines.push(\"...\");\n for (let i2 = size - FORMAT_NUM_FIRST_LAST_VALS; i2 < size; i2++) {\n const start = i2 * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i2 === size - 1));\n }\n } else {\n for (let i2 = 0; i2 < size; i2++) {\n const start = i2 * stride;\n const end = start + stride;\n lines.push(...subTensorToString(vals.slice(start, end), subshape, dtype, substrides, padPerCol, i2 === size - 1));\n }\n }\n const sep = rank === 2 ? \",\" : \"\";\n lines[0] = \"[\" + lines[0] + sep;\n for (let i2 = 1; i2 < lines.length - 1; i2++) {\n lines[i2] = \" \" + lines[i2] + sep;\n }\n let newLineSep = \",\\n\";\n for (let i2 = 2; i2 < rank; i2++) {\n newLineSep += \"\\n\";\n }\n lines[lines.length - 1] = \" \" + lines[lines.length - 1] + \"]\" + (isLast ? \"\" : newLineSep);\n return lines;\n}\nfunction createComplexTuples(vals) {\n const complexTuples = [];\n for (let i2 = 0; i2 < vals.length; i2 += 2) {\n complexTuples.push([vals[i2], vals[i2 + 1]]);\n }\n return complexTuples;\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tensor.js\nvar TensorBuffer = class {\n constructor(shape, dtype, values) {\n this.dtype = dtype;\n this.shape = shape.slice();\n this.size = sizeFromShape(shape);\n if (values != null) {\n const n2 = values.length;\n assert(n2 === this.size, () => `Length of values '${n2}' does not match the size inferred by the shape '${this.size}'.`);\n }\n if (dtype === \"complex64\") {\n throw new Error(`complex64 dtype TensorBuffers are not supported. Please create a TensorBuffer for the real and imaginary parts separately and call tf.complex(real, imag).`);\n }\n this.values = values || getArrayFromDType(dtype, this.size);\n this.strides = computeStrides(shape);\n }\n set(value, ...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n assert(locs.length === this.rank, () => `The number of provided coordinates (${locs.length}) must match the rank (${this.rank})`);\n const index = this.locToIndex(locs);\n this.values[index] = value;\n }\n get(...locs) {\n if (locs.length === 0) {\n locs = [0];\n }\n let i2 = 0;\n for (const loc of locs) {\n if (loc < 0 || loc >= this.shape[i2]) {\n const msg = `Requested out of range element at ${locs}. 
Buffer shape=${this.shape}`;\n throw new Error(msg);\n }\n i2++;\n }\n let index = locs[locs.length - 1];\n for (let i3 = 0; i3 < locs.length - 1; ++i3) {\n index += this.strides[i3] * locs[i3];\n }\n return this.values[index];\n }\n locToIndex(locs) {\n if (this.rank === 0) {\n return 0;\n } else if (this.rank === 1) {\n return locs[0];\n }\n let index = locs[locs.length - 1];\n for (let i2 = 0; i2 < locs.length - 1; ++i2) {\n index += this.strides[i2] * locs[i2];\n }\n return index;\n }\n indexToLoc(index) {\n if (this.rank === 0) {\n return [];\n } else if (this.rank === 1) {\n return [index];\n }\n const locs = new Array(this.shape.length);\n for (let i2 = 0; i2 < locs.length - 1; ++i2) {\n locs[i2] = Math.floor(index / this.strides[i2]);\n index -= locs[i2] * this.strides[i2];\n }\n locs[locs.length - 1] = index;\n return locs;\n }\n get rank() {\n return this.shape.length;\n }\n toTensor() {\n return trackerFn().makeTensor(this.values, this.shape, this.dtype);\n }\n};\nvar trackerFn = null;\nvar opHandler = null;\nvar deprecationWarningFn = null;\nfunction setTensorTracker(fn) {\n trackerFn = fn;\n}\nfunction setOpHandler(handler) {\n opHandler = handler;\n}\nfunction setDeprecationWarningFn(fn) {\n deprecationWarningFn = fn;\n}\nvar Tensor = class {\n constructor(shape, dtype, dataId, id) {\n this.kept = false;\n this.isDisposedInternal = false;\n this.shape = shape.slice();\n this.dtype = dtype || \"float32\";\n this.size = sizeFromShape(shape);\n this.strides = computeStrides(shape);\n this.dataId = dataId;\n this.id = id;\n this.rankType = this.rank < 5 ? this.rank.toString() : \"higher\";\n }\n get rank() {\n return this.shape.length;\n }\n async buffer() {\n const vals = await this.data();\n return opHandler.buffer(this.shape, this.dtype, vals);\n }\n bufferSync() {\n return opHandler.buffer(this.shape, this.dtype, this.dataSync());\n }\n async array() {\n const vals = await this.data();\n return toNestedArray(this.shape, vals, this.dtype === \"complex64\");\n }\n arraySync() {\n return toNestedArray(this.shape, this.dataSync(), this.dtype === \"complex64\");\n }\n async data() {\n this.throwIfDisposed();\n const data = trackerFn().read(this.dataId);\n if (this.dtype === \"string\") {\n const bytes = await data;\n try {\n return bytes.map((b) => decodeString(b));\n } catch (_a) {\n throw new Error(\"Failed to decode the string bytes into utf-8. To get the original bytes, call tensor.bytes().\");\n }\n }\n return data;\n }\n dataToGPU(options) {\n this.throwIfDisposed();\n return trackerFn().readToGPU(this.dataId, options);\n }\n dataSync() {\n this.throwIfDisposed();\n const data = trackerFn().readSync(this.dataId);\n if (this.dtype === \"string\") {\n try {\n return data.map((b) => decodeString(b));\n } catch (_a) {\n throw new Error(\"Failed to decode the string bytes into utf-8. 
To get the original bytes, call tensor.bytes().\");\n }\n }\n return data;\n }\n async bytes() {\n this.throwIfDisposed();\n const data = await trackerFn().read(this.dataId);\n if (this.dtype === \"string\") {\n return data;\n } else {\n return new Uint8Array(data.buffer);\n }\n }\n dispose() {\n if (this.isDisposed) {\n return;\n }\n trackerFn().disposeTensor(this);\n this.isDisposedInternal = true;\n }\n get isDisposed() {\n return this.isDisposedInternal;\n }\n throwIfDisposed() {\n if (this.isDisposed) {\n throw new Error(`Tensor is disposed.`);\n }\n }\n print(verbose = false) {\n return opHandler.print(this, verbose);\n }\n clone() {\n this.throwIfDisposed();\n return opHandler.clone(this);\n }\n toString(verbose = false) {\n const vals = this.dataSync();\n return tensorToString(vals, this.shape, this.dtype, verbose);\n }\n cast(dtype) {\n this.throwIfDisposed();\n return opHandler.cast(this, dtype);\n }\n variable(trainable = true, name, dtype) {\n this.throwIfDisposed();\n return trackerFn().makeVariable(this, trainable, name, dtype);\n }\n};\nObject.defineProperty(Tensor, Symbol.hasInstance, {\n value: (instance) => {\n return !!instance && instance.data != null && instance.dataSync != null && instance.throwIfDisposed != null;\n }\n});\nfunction getGlobalTensorClass() {\n return getGlobal(\"Tensor\", () => {\n return Tensor;\n });\n}\ngetGlobalTensorClass();\nvar Variable = class extends Tensor {\n constructor(initialValue, trainable, name, tensorId) {\n super(initialValue.shape, initialValue.dtype, initialValue.dataId, tensorId);\n this.trainable = trainable;\n this.name = name;\n }\n assign(newValue) {\n if (newValue.dtype !== this.dtype) {\n throw new Error(`dtype of the new value (${newValue.dtype}) and previous value (${this.dtype}) must match`);\n }\n if (!arraysEqual(newValue.shape, this.shape)) {\n throw new Error(`shape of the new value (${newValue.shape}) and previous value (${this.shape}) must match`);\n }\n trackerFn().disposeTensor(this);\n this.dataId = newValue.dataId;\n trackerFn().incRef(this, null);\n }\n dispose() {\n trackerFn().disposeVariable(this);\n this.isDisposedInternal = true;\n }\n};\nObject.defineProperty(Variable, Symbol.hasInstance, {\n value: (instance) => {\n return instance instanceof Tensor && instance.assign != null && instance.assign instanceof Function;\n }\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tensor_util.js\nvar tensor_util_exports = {};\n__export(tensor_util_exports, {\n assertTypesMatch: () => assertTypesMatch,\n getTensorsInContainer: () => getTensorsInContainer,\n isTensorInList: () => isTensorInList,\n makeTypesMatch: () => makeTypesMatch\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/types.js\nvar Rank;\n(function(Rank2) {\n Rank2[\"R0\"] = \"R0\";\n Rank2[\"R1\"] = \"R1\";\n Rank2[\"R2\"] = \"R2\";\n Rank2[\"R3\"] = \"R3\";\n Rank2[\"R4\"] = \"R4\";\n Rank2[\"R5\"] = \"R5\";\n Rank2[\"R6\"] = \"R6\";\n})(Rank || (Rank = {}));\nvar UpcastInt32AndMap;\n(function(UpcastInt32AndMap2) {\n UpcastInt32AndMap2[\"float32\"] = \"float32\";\n UpcastInt32AndMap2[\"int32\"] = \"int32\";\n UpcastInt32AndMap2[\"bool\"] = \"int32\";\n UpcastInt32AndMap2[\"complex64\"] = \"complex64\";\n})(UpcastInt32AndMap || (UpcastInt32AndMap = {}));\nvar UpcastBoolAndMap;\n(function(UpcastBoolAndMap2) {\n UpcastBoolAndMap2[\"float32\"] = \"float32\";\n UpcastBoolAndMap2[\"int32\"] = \"int32\";\n UpcastBoolAndMap2[\"bool\"] = \"bool\";\n 
UpcastBoolAndMap2[\"complex64\"] = \"complex64\";\n})(UpcastBoolAndMap || (UpcastBoolAndMap = {}));\nvar UpcastFloat32AndMap;\n(function(UpcastFloat32AndMap2) {\n UpcastFloat32AndMap2[\"float32\"] = \"float32\";\n UpcastFloat32AndMap2[\"int32\"] = \"float32\";\n UpcastFloat32AndMap2[\"bool\"] = \"float32\";\n UpcastFloat32AndMap2[\"complex64\"] = \"complex64\";\n})(UpcastFloat32AndMap || (UpcastFloat32AndMap = {}));\nvar UpcastComplex64AndMap;\n(function(UpcastComplex64AndMap2) {\n UpcastComplex64AndMap2[\"float32\"] = \"complex64\";\n UpcastComplex64AndMap2[\"int32\"] = \"complex64\";\n UpcastComplex64AndMap2[\"bool\"] = \"complex64\";\n UpcastComplex64AndMap2[\"complex64\"] = \"complex64\";\n})(UpcastComplex64AndMap || (UpcastComplex64AndMap = {}));\nvar upcastTypeMap = {\n \"float32\": UpcastFloat32AndMap,\n \"int32\": UpcastInt32AndMap,\n \"bool\": UpcastBoolAndMap,\n \"complex64\": UpcastComplex64AndMap\n};\nfunction upcastType(typeA, typeB) {\n if (typeA === \"string\" || typeB === \"string\") {\n if (typeA === \"string\" && typeB === \"string\") {\n return \"string\";\n }\n throw new Error(`Can not upcast ${typeA} with ${typeB}`);\n }\n return upcastTypeMap[typeA][typeB];\n}\nfunction sumOutType(type) {\n return upcastType(type, \"int32\");\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tensor_util.js\nfunction makeTypesMatch(a, b) {\n if (a.dtype === b.dtype) {\n return [a, b];\n }\n const dtype = upcastType(a.dtype, b.dtype);\n return [a.cast(dtype), b.cast(dtype)];\n}\nfunction assertTypesMatch(a, b) {\n assert(a.dtype === b.dtype, () => `The dtypes of the first(${a.dtype}) and second(${b.dtype}) input must match`);\n}\nfunction isTensorInList(tensor2, tensorList) {\n return tensorList.some((x) => x.id === tensor2.id);\n}\nfunction getTensorsInContainer(result) {\n const list = [];\n const seen = /* @__PURE__ */ new Set();\n walkTensorContainer(result, list, seen);\n return list;\n}\nfunction walkTensorContainer(container, list, seen) {\n if (container == null) {\n return;\n }\n if (container instanceof Tensor) {\n list.push(container);\n return;\n }\n if (!isIterable(container)) {\n return;\n }\n const iterable = container;\n for (const k in iterable) {\n const val = iterable[k];\n if (!seen.has(val)) {\n seen.add(val);\n walkTensorContainer(val, list, seen);\n }\n }\n}\nfunction isIterable(obj) {\n return Array.isArray(obj) || typeof obj === \"object\";\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/engine.js\nfunction isRegisteredKernelInvocation(kernelInvocation) {\n return kernelInvocation.kernelName != null;\n}\nvar EngineState = class {\n constructor() {\n this.registeredVariables = {};\n this.nextTapeNodeId = 0;\n this.numBytes = 0;\n this.numTensors = 0;\n this.numStringTensors = 0;\n this.numDataBuffers = 0;\n this.gradientDepth = 0;\n this.kernelDepth = 0;\n this.scopeStack = [];\n this.numDataMovesStack = [];\n this.nextScopeId = 0;\n this.tensorInfo = /* @__PURE__ */ new WeakMap();\n this.profiling = false;\n this.activeProfile = {\n newBytes: 0,\n newTensors: 0,\n peakBytes: 0,\n kernels: [],\n result: null,\n get kernelNames() {\n return Array.from(new Set(this.kernels.map((k) => k.name)));\n }\n };\n }\n dispose() {\n for (const variableName in this.registeredVariables) {\n this.registeredVariables[variableName].dispose();\n }\n }\n};\nvar Engine = class {\n constructor(ENV8) {\n this.ENV = ENV8;\n this.registry = {};\n this.registryFactory = {};\n 
this.pendingBackendInitId = 0;\n this.state = new EngineState();\n }\n async ready() {\n if (this.pendingBackendInit != null) {\n return this.pendingBackendInit.then(() => {\n });\n }\n if (this.backendInstance != null) {\n return;\n }\n const sortedBackends = this.getSortedBackends();\n for (let i2 = 0; i2 < sortedBackends.length; i2++) {\n const backendName = sortedBackends[i2];\n const success = await this.initializeBackend(backendName).success;\n if (success) {\n await this.setBackend(backendName);\n return;\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations failed.`);\n }\n get backend() {\n if (this.pendingBackendInit != null) {\n throw new Error(`Backend '${this.backendName}' has not yet been initialized. Make sure to await tf.ready() or await tf.setBackend() before calling other methods`);\n }\n if (this.backendInstance == null) {\n const { name, asyncInit } = this.initializeBackendsAndReturnBest();\n if (asyncInit) {\n throw new Error(`The highest priority backend '${name}' has not yet been initialized. Make sure to await tf.ready() or await tf.setBackend() before calling other methods`);\n }\n this.setBackend(name);\n }\n return this.backendInstance;\n }\n backendNames() {\n return Object.keys(this.registryFactory);\n }\n findBackend(backendName) {\n if (!(backendName in this.registry)) {\n if (backendName in this.registryFactory) {\n const { asyncInit } = this.initializeBackend(backendName);\n if (asyncInit) {\n return null;\n }\n } else {\n return null;\n }\n }\n return this.registry[backendName];\n }\n findBackendFactory(backendName) {\n if (!(backendName in this.registryFactory)) {\n return null;\n }\n return this.registryFactory[backendName].factory;\n }\n registerBackend(backendName, factory, priority = 1) {\n if (backendName in this.registryFactory) {\n warn(`${backendName} backend was already registered. Reusing existing backend factory.`);\n return false;\n }\n this.registryFactory[backendName] = { factory, priority };\n return true;\n }\n async setBackend(backendName) {\n if (this.registryFactory[backendName] == null) {\n throw new Error(`Backend name '${backendName}' not found in registry`);\n }\n this.backendName = backendName;\n if (this.registry[backendName] == null) {\n this.backendInstance = null;\n const { success, asyncInit } = this.initializeBackend(backendName);\n const result = asyncInit ? 
await success : success;\n if (!result) {\n return false;\n }\n }\n this.backendInstance = this.registry[backendName];\n this.setupRegisteredKernels();\n this.profiler = new Profiler(this.backendInstance);\n return true;\n }\n setupRegisteredKernels() {\n const kernels = getKernelsForBackend(this.backendName);\n kernels.forEach((kernel) => {\n if (kernel.setupFunc != null) {\n kernel.setupFunc(this.backendInstance);\n }\n });\n }\n disposeRegisteredKernels(backendName) {\n const kernels = getKernelsForBackend(backendName);\n kernels.forEach((kernel) => {\n if (kernel.disposeFunc != null) {\n kernel.disposeFunc(this.registry[backendName]);\n }\n });\n }\n initializeBackend(backendName) {\n const registryFactoryEntry = this.registryFactory[backendName];\n if (registryFactoryEntry == null) {\n throw new Error(`Cannot initialize backend ${backendName}, no registration found.`);\n }\n try {\n const backend2 = registryFactoryEntry.factory();\n if (backend2 && !(backend2 instanceof KernelBackend) && typeof backend2.then === \"function\") {\n const promiseId = ++this.pendingBackendInitId;\n const success = backend2.then((backendInstance) => {\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.registry[backendName] = backendInstance;\n this.pendingBackendInit = null;\n return true;\n }).catch((err) => {\n if (promiseId < this.pendingBackendInitId) {\n return false;\n }\n this.pendingBackendInit = null;\n warn(`Initialization of backend ${backendName} failed`);\n warn(err.stack || err.message);\n return false;\n });\n this.pendingBackendInit = success;\n return { success, asyncInit: true };\n } else {\n this.registry[backendName] = backend2;\n return { success: true, asyncInit: false };\n }\n } catch (err) {\n warn(`Initialization of backend ${backendName} failed`);\n warn(err.stack || err.message);\n return { success: false, asyncInit: false };\n }\n }\n removeBackend(backendName) {\n if (!(backendName in this.registryFactory)) {\n throw new Error(`${backendName} backend not found in registry`);\n }\n if (this.backendName === backendName && this.pendingBackendInit != null) {\n this.pendingBackendInitId++;\n }\n if (backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n delete this.registryFactory[backendName];\n if (this.backendName === backendName) {\n this.pendingBackendInit = null;\n this.backendName = null;\n this.backendInstance = null;\n }\n }\n getSortedBackends() {\n if (Object.keys(this.registryFactory).length === 0) {\n throw new Error(\"No backend found in registry.\");\n }\n return Object.keys(this.registryFactory).sort((a, b) => {\n return this.registryFactory[b].priority - this.registryFactory[a].priority;\n });\n }\n initializeBackendsAndReturnBest() {\n const sortedBackends = this.getSortedBackends();\n for (let i2 = 0; i2 < sortedBackends.length; i2++) {\n const backendName = sortedBackends[i2];\n const { success, asyncInit } = this.initializeBackend(backendName);\n if (asyncInit || success) {\n return { name: backendName, asyncInit };\n }\n }\n throw new Error(`Could not initialize any backends, all backend initializations failed.`);\n }\n moveData(backend2, dataId) {\n const info = this.state.tensorInfo.get(dataId);\n const srcBackend = info.backend;\n const values = this.readSync(dataId);\n const refCount = srcBackend.refCount(dataId);\n srcBackend.disposeData(dataId, true);\n info.backend = backend2;\n backend2.move(dataId, values, info.shape, 
info.dtype, refCount);\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1]++;\n }\n }\n tidy(nameOrFn, fn) {\n let name = null;\n if (fn == null) {\n if (typeof nameOrFn !== \"function\") {\n throw new Error(\"Please provide a function to tidy()\");\n }\n fn = nameOrFn;\n } else {\n if (typeof nameOrFn !== \"string\" && !(nameOrFn instanceof String)) {\n throw new Error(\"When calling with two arguments, the first argument to tidy() must be a string\");\n }\n if (typeof fn !== \"function\") {\n throw new Error(\"When calling with two arguments, the 2nd argument to tidy() must be a function\");\n }\n name = nameOrFn;\n }\n let result;\n return this.scopedRun(() => this.startScope(name), () => this.endScope(result), () => {\n result = fn();\n if (result instanceof Promise) {\n console.error(\"Cannot return a Promise inside of tidy.\");\n }\n return result;\n });\n }\n scopedRun(start, end, f) {\n start();\n try {\n const res = f();\n end();\n return res;\n } catch (ex) {\n end();\n throw ex;\n }\n }\n nextTensorId() {\n return Engine.nextTensorId++;\n }\n nextVariableId() {\n return Engine.nextVariableId++;\n }\n clone(x) {\n const y = ENGINE.runKernel(Identity, { x });\n const inputs = { x };\n const grad2 = (dy) => ({\n x: () => {\n const dtype = \"float32\";\n const gradInputs = { x: dy };\n const attrs = { dtype };\n return ENGINE.runKernel(\n Cast,\n gradInputs,\n attrs\n );\n }\n });\n const saved = [];\n this.addTapeNode(this.state.activeScope.name, inputs, [y], grad2, saved, {});\n return y;\n }\n runKernel(kernelName, inputs, attrs) {\n if (this.backendName == null) {\n this.backend;\n }\n const hasKernel = getKernel(kernelName, this.backendName) != null;\n if (!hasKernel) {\n throw new Error(`Kernel '${kernelName}' not registered for backend '${this.backendName}'`);\n }\n return this.runKernelFunc({ kernelName, inputs, attrs });\n }\n shouldCheckForMemLeaks() {\n return this.ENV.getBool(\"IS_TEST\");\n }\n checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos) {\n const numDataIdsAfter = this.backend.numDataIds();\n let numOutputDataIds = 0;\n outInfos.forEach((info) => {\n numOutputDataIds += info.dtype === \"complex64\" ? 3 : 1;\n });\n const numMoves = this.state.numDataMovesStack[this.state.numDataMovesStack.length - 1];\n const dataIdsLeaked = numDataIdsAfter - numDataIdsBefore - numOutputDataIds - numMoves;\n if (dataIdsLeaked > 0) {\n throw new Error(`Backend '${this.backendName}' has an internal memory leak (${dataIdsLeaked} data ids) after running '${kernelName}'`);\n }\n }\n runKernelFunc(kernelParams) {\n let outputs;\n let saved = [];\n const isTapeOn = this.isTapeOn();\n const startingBytecount = this.state.numBytes;\n const startingNumTensors = this.state.numTensors;\n if (this.shouldCheckForMemLeaks()) {\n this.state.numDataMovesStack.push(0);\n }\n let kernelFunc3;\n if (this.backendName == null) {\n this.backend;\n }\n let out;\n const kernelOrScopeName = isRegisteredKernelInvocation(kernelParams) ? kernelParams.kernelName : this.state.activeScope != null ? 
this.state.activeScope.name : \"\";\n if (isRegisteredKernelInvocation(kernelParams)) {\n const { kernelName, inputs: inputs2, attrs: attrs2 } = kernelParams;\n if (this.backendName == null) {\n this.backend;\n }\n const kernel = getKernel(kernelName, this.backendName);\n assert(kernel != null, () => `Cannot find registered kernel '${kernelName}' for backend '${this.backendName}'`);\n kernelFunc3 = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = kernel.kernelFunc({ inputs: inputs2, attrs: attrs2, backend: this.backend });\n const outInfos = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelName, numDataIdsBefore, outInfos);\n }\n const outTensors = outInfos.map((outInfo) => {\n if (outInfo.rank != null) {\n return outInfo;\n }\n return this.makeTensorFromTensorInfo(outInfo);\n });\n if (isTapeOn) {\n const tensorsToSave = this.getTensorsForGradient(kernelName, inputs2, outTensors);\n saved = this.saveTensorsForBackwardMode(tensorsToSave);\n }\n return outTensors;\n };\n } else {\n const { forwardFunc } = kernelParams;\n const saveFunc = (tensors) => {\n if (!isTapeOn) {\n return;\n }\n saved = tensors.map((tensor2) => this.keep(this.clone(tensor2)));\n };\n kernelFunc3 = () => {\n const numDataIdsBefore = this.backend.numDataIds();\n out = this.tidy(() => forwardFunc(this.backend, saveFunc));\n const outs = Array.isArray(out) ? out : [out];\n if (this.shouldCheckForMemLeaks()) {\n this.checkKernelForMemLeak(kernelOrScopeName, numDataIdsBefore, outs);\n }\n return outs;\n };\n }\n const { inputs, attrs } = kernelParams;\n const backwardsFunc = isRegisteredKernelInvocation(kernelParams) ? null : kernelParams.backwardsFunc;\n let kernelProfile;\n this.scopedRun(\n () => this.state.kernelDepth++,\n () => this.state.kernelDepth--,\n () => {\n if (!this.ENV.getBool(\"DEBUG\") && !this.state.profiling) {\n outputs = kernelFunc3();\n } else {\n kernelProfile = this.profiler.profileKernel(kernelOrScopeName, inputs, () => kernelFunc3());\n if (this.ENV.getBool(\"DEBUG\")) {\n this.profiler.logKernelProfile(kernelProfile);\n }\n outputs = kernelProfile.outputs;\n }\n }\n );\n if (isTapeOn) {\n this.addTapeNode(kernelOrScopeName, inputs, outputs, backwardsFunc, saved, attrs);\n }\n if (this.state.profiling) {\n this.state.activeProfile.kernels.push({\n name: kernelOrScopeName,\n bytesAdded: this.state.numBytes - startingBytecount,\n totalBytesSnapshot: this.state.numBytes,\n tensorsAdded: this.state.numTensors - startingNumTensors,\n totalTensorsSnapshot: this.state.numTensors,\n inputShapes: Object.keys(inputs).map((key) => inputs[key] != null ? inputs[key].shape : null),\n outputShapes: outputs.map((item) => item.shape),\n kernelTimeMs: kernelProfile.timeMs,\n extraInfo: kernelProfile.extraInfo\n });\n }\n return Array.isArray(out) ? 
outputs : outputs[0];\n }\n saveTensorsForBackwardMode(tensors) {\n const saved = tensors.map((tensor2) => this.keep(this.clone(tensor2)));\n return saved;\n }\n getTensorsForGradient(kernelName, inputs, outputs) {\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n const inputsToSave = gradConfig.inputsToSave || [];\n const outputsToSave = gradConfig.outputsToSave || [];\n let inputTensorsToSave;\n if (gradConfig.saveAllInputs) {\n assert(Array.isArray(inputs), () => \"saveAllInputs is true, expected inputs to be an array.\");\n inputTensorsToSave = Object.keys(inputs).map((key) => inputs[key]);\n } else {\n inputTensorsToSave = inputsToSave.map((inputName) => inputs[inputName]);\n }\n const outputTensorsToSave = outputs.filter((_, i2) => outputsToSave[i2]);\n return inputTensorsToSave.concat(outputTensorsToSave);\n }\n return [];\n }\n makeTensor(values, shape, dtype, backend2) {\n if (values == null) {\n throw new Error(\"Values passed to engine.makeTensor() are null\");\n }\n dtype = dtype || \"float32\";\n backend2 = backend2 || this.backend;\n let backendVals = values;\n if (dtype === \"string\" && isString(values[0])) {\n backendVals = values.map((d) => encodeString(d));\n }\n const dataId = backend2.write(backendVals, shape, dtype);\n const t2 = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.trackTensor(t2, backend2);\n if (dtype === \"string\") {\n const info = this.state.tensorInfo.get(dataId);\n const newBytes = bytesFromStringArray(backendVals);\n this.state.numBytes += newBytes - info.bytes;\n info.bytes = newBytes;\n }\n return t2;\n }\n makeTensorFromDataId(dataId, shape, dtype, backend2) {\n dtype = dtype || \"float32\";\n const tensorInfo = { dataId, shape, dtype };\n return this.makeTensorFromTensorInfo(tensorInfo, backend2);\n }\n makeTensorFromTensorInfo(tensorInfo, backend2) {\n const { dataId, shape, dtype } = tensorInfo;\n const t2 = new Tensor(shape, dtype, dataId, this.nextTensorId());\n this.trackTensor(t2, backend2);\n return t2;\n }\n makeVariable(initialValue, trainable = true, name, dtype) {\n name = name || this.nextVariableId().toString();\n if (dtype != null && dtype !== initialValue.dtype) {\n initialValue = initialValue.cast(dtype);\n }\n const v = new Variable(initialValue, trainable, name, this.nextTensorId());\n if (this.state.registeredVariables[v.name] != null) {\n throw new Error(`Variable with name ${v.name} was already registered`);\n }\n this.state.registeredVariables[v.name] = v;\n this.incRef(v, this.backend);\n return v;\n }\n trackTensor(a, backend2) {\n this.state.numTensors++;\n if (a.dtype === \"string\") {\n this.state.numStringTensors++;\n }\n let bytes = 0;\n if (a.dtype !== \"complex64\" && a.dtype !== \"string\") {\n bytes = a.size * bytesPerElement(a.dtype);\n }\n this.state.numBytes += bytes;\n if (!this.state.tensorInfo.has(a.dataId)) {\n this.state.numDataBuffers++;\n this.state.tensorInfo.set(a.dataId, {\n backend: backend2 || this.backend,\n dtype: a.dtype,\n shape: a.shape,\n bytes\n });\n }\n if (!(a instanceof Variable)) {\n this.track(a);\n }\n }\n incRef(a, backend2) {\n this.trackTensor(a, backend2);\n this.backend.incRef(a.dataId);\n }\n removeDataId(dataId, backend2) {\n if (this.state.tensorInfo.has(dataId) && this.state.tensorInfo.get(dataId).backend === backend2) {\n this.state.tensorInfo.delete(dataId);\n this.state.numDataBuffers--;\n }\n }\n disposeTensor(a) {\n if (!this.state.tensorInfo.has(a.dataId)) {\n return;\n }\n const info = this.state.tensorInfo.get(a.dataId);\n 
this.state.numTensors--;\n if (a.dtype === \"string\") {\n this.state.numStringTensors--;\n this.state.numBytes -= info.bytes;\n }\n if (a.dtype !== \"complex64\" && a.dtype !== \"string\") {\n const bytes = a.size * bytesPerElement(a.dtype);\n this.state.numBytes -= bytes;\n }\n if (info.backend.disposeData(a.dataId)) {\n this.removeDataId(a.dataId, info.backend);\n }\n }\n disposeVariables() {\n for (const varName in this.state.registeredVariables) {\n const v = this.state.registeredVariables[varName];\n this.disposeVariable(v);\n }\n }\n disposeVariable(v) {\n this.disposeTensor(v);\n if (this.state.registeredVariables[v.name] != null) {\n delete this.state.registeredVariables[v.name];\n }\n }\n memory() {\n const info = this.backend.memory();\n info.numTensors = this.state.numTensors;\n info.numDataBuffers = this.state.numDataBuffers;\n info.numBytes = this.state.numBytes;\n if (this.state.numStringTensors > 0) {\n info.unreliable = true;\n if (info.reasons == null) {\n info.reasons = [];\n }\n info.reasons.push(\"Memory usage by string tensors is approximate (2 bytes per character)\");\n }\n return info;\n }\n async profile(query) {\n this.state.profiling = true;\n const startBytes = this.state.numBytes;\n const startNumTensors = this.state.numTensors;\n this.state.activeProfile.kernels = [];\n this.state.activeProfile.result = await query();\n this.state.profiling = false;\n this.state.activeProfile.peakBytes = Math.max(...this.state.activeProfile.kernels.map((d) => d.totalBytesSnapshot));\n this.state.activeProfile.newBytes = this.state.numBytes - startBytes;\n this.state.activeProfile.newTensors = this.state.numTensors - startNumTensors;\n for (const kernel of this.state.activeProfile.kernels) {\n kernel.kernelTimeMs = await kernel.kernelTimeMs;\n kernel.extraInfo = await kernel.extraInfo;\n }\n return this.state.activeProfile;\n }\n isTapeOn() {\n return this.state.gradientDepth > 0 && this.state.kernelDepth === 0;\n }\n addTapeNode(kernelName, inputs, outputs, gradientsFunc, saved, attrs) {\n const tapeNode = { id: this.state.nextTapeNodeId++, kernelName, inputs, outputs, saved };\n const gradConfig = getGradient(kernelName);\n if (gradConfig != null) {\n gradientsFunc = gradConfig.gradFunc;\n }\n if (gradientsFunc != null) {\n tapeNode.gradient = (dys) => {\n dys = dys.map((dy, i2) => {\n if (dy == null) {\n const output = outputs[i2];\n const vals = makeZerosTypedArray(output.size, output.dtype);\n return this.makeTensor(vals, output.shape, output.dtype);\n }\n return dy;\n });\n return gradientsFunc(dys.length > 1 ? 
dys : dys[0], saved, attrs);\n };\n }\n this.state.activeTape.push(tapeNode);\n }\n keep(result) {\n result.kept = true;\n return result;\n }\n startTape() {\n if (this.state.gradientDepth === 0) {\n this.state.activeTape = [];\n }\n this.state.gradientDepth++;\n }\n endTape() {\n this.state.gradientDepth--;\n }\n startScope(name) {\n const scopeInfo = {\n track: [],\n name: \"unnamed scope\",\n id: this.state.nextScopeId++\n };\n if (name) {\n scopeInfo.name = name;\n }\n this.state.scopeStack.push(scopeInfo);\n this.state.activeScope = scopeInfo;\n }\n endScope(result) {\n const tensorsToTrackInParent = getTensorsInContainer(result);\n const tensorsToTrackInParentSet = new Set(tensorsToTrackInParent.map((t2) => t2.id));\n for (let i2 = 0; i2 < this.state.activeScope.track.length; i2++) {\n const tensor2 = this.state.activeScope.track[i2];\n if (!tensor2.kept && !tensorsToTrackInParentSet.has(tensor2.id)) {\n tensor2.dispose();\n }\n }\n const oldScope = this.state.scopeStack.pop();\n this.state.activeScope = this.state.scopeStack.length === 0 ? null : this.state.scopeStack[this.state.scopeStack.length - 1];\n tensorsToTrackInParent.forEach((tensor2) => {\n if (!tensor2.kept && tensor2.scopeId === oldScope.id) {\n this.track(tensor2);\n }\n });\n }\n gradients(f, xs, dy, allowNoGradients = false) {\n assert(xs.length > 0, () => \"gradients() received an empty list of xs.\");\n if (dy != null && dy.dtype !== \"float32\") {\n throw new Error(`dy must have 'float32' dtype, but has '${dy.dtype}'`);\n }\n const y = this.scopedRun(() => this.startTape(), () => this.endTape(), () => this.tidy(\"forward\", f));\n assert(y instanceof Tensor, () => \"The result y returned by f() must be a tensor.\");\n const filteredTape = getFilteredNodesXToY(this.state.activeTape, xs, y);\n if (!allowNoGradients && filteredTape.length === 0 && xs.length > 0) {\n throw new Error(\"Cannot compute gradient of y=f(x) with respect to x. Make sure that the f you passed encloses all operations that lead from x to y.\");\n }\n return this.tidy(\"backward\", () => {\n const accumulatedGradientMap = {};\n accumulatedGradientMap[y.id] = dy == null ? ones(y.shape) : dy;\n backpropagateGradients(\n accumulatedGradientMap,\n filteredTape,\n (f2) => this.tidy(f2),\n add\n );\n const grads2 = xs.map((x) => accumulatedGradientMap[x.id]);\n if (this.state.gradientDepth === 0) {\n this.state.activeTape.forEach((node) => {\n for (const tensor2 of node.saved) {\n tensor2.dispose();\n }\n });\n this.state.activeTape = null;\n }\n return { value: y, grads: grads2 };\n });\n }\n customGrad(f) {\n assert(isFunction(f), () => \"The f passed in customGrad(f) must be a function.\");\n return (...inputs) => {\n assert(inputs.every((t2) => t2 instanceof Tensor), () => \"The args passed in customGrad(f)(x1, x2,...) must all be tensors\");\n let res;\n const inputMap = {};\n inputs.forEach((input2, i2) => {\n inputMap[i2] = input2;\n });\n const forwardFunc = (_, save) => {\n res = f(...[...inputs, save]);\n assert(res.value instanceof Tensor, () => \"The function f passed in customGrad(f) must return an object where `obj.value` is a tensor\");\n assert(isFunction(res.gradFunc), () => \"The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function.\");\n return res.value;\n };\n const backwardsFunc = (dy, saved) => {\n const gradRes = res.gradFunc(dy, saved);\n const grads2 = Array.isArray(gradRes) ? 
gradRes : [gradRes];\n assert(grads2.length === inputs.length, () => \"The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function that returns the same number of tensors as inputs passed to f(...).\");\n assert(grads2.every((t2) => t2 instanceof Tensor), () => \"The function f passed in customGrad(f) must return an object where `obj.gradFunc` is a function that returns a list of only tensors.\");\n const gradMap = {};\n grads2.forEach((grad2, i2) => {\n gradMap[i2] = () => grad2;\n });\n return gradMap;\n };\n return this.runKernelFunc({\n forwardFunc,\n backwardsFunc,\n inputs: inputMap\n });\n };\n }\n readSync(dataId) {\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readSync(dataId);\n }\n read(dataId) {\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.read(dataId);\n }\n readToGPU(dataId, options) {\n const info = this.state.tensorInfo.get(dataId);\n return info.backend.readToGPU(dataId, options);\n }\n async time(query) {\n const start = now();\n const timingInfo = await this.backend.time(query);\n timingInfo.wallMs = now() - start;\n return timingInfo;\n }\n track(result) {\n if (this.state.activeScope != null) {\n result.scopeId = this.state.activeScope.id;\n this.state.activeScope.track.push(result);\n }\n return result;\n }\n get registeredVariables() {\n return this.state.registeredVariables;\n }\n reset() {\n this.pendingBackendInitId++;\n this.state.dispose();\n this.ENV.reset();\n this.state = new EngineState();\n for (const backendName in this.registry) {\n this.disposeRegisteredKernels(backendName);\n this.registry[backendName].dispose();\n delete this.registry[backendName];\n }\n this.backendName = null;\n this.backendInstance = null;\n this.pendingBackendInit = null;\n }\n};\nEngine.nextTensorId = 0;\nEngine.nextVariableId = 0;\nfunction ones(shape) {\n const values = makeOnesTypedArray(sizeFromShape(shape), \"float32\");\n return ENGINE.makeTensor(values, shape, \"float32\");\n}\nfunction getOrMakeEngine() {\n const ns = getGlobalNamespace();\n if (ns._tfengine == null) {\n const environment = new Environment(ns);\n ns._tfengine = new Engine(environment);\n }\n setEnvironmentGlobal(ns._tfengine.ENV);\n setTensorTracker(() => ns._tfengine);\n return ns._tfengine;\n}\nvar ENGINE = getOrMakeEngine();\nfunction add(a, b) {\n const inputs = { a, b };\n return ENGINE.runKernel(Add, inputs);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/device_util.js\nvar device_util_exports = {};\n__export(device_util_exports, {\n isBrowser: () => isBrowser,\n isMobile: () => isMobile,\n mockIsMobile: () => mockIsMobile\n});\nfunction _isNavigatorDefined() {\n return typeof navigator !== \"undefined\" && navigator != null;\n}\nvar isMobileMockValue;\nfunction mockIsMobile(value) {\n isMobileMockValue = value;\n}\nfunction isMobile(nav) {\n if (isMobileMockValue !== void 0) {\n return isMobileMockValue;\n }\n if (nav || _isNavigatorDefined()) {\n if (!nav) {\n nav = navigator;\n }\n if (nav.product === \"ReactNative\") {\n return true;\n }\n const a = nav.userAgent || nav.vendor || (typeof window !== \"undefined\" ? 
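// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: exercising the
// Engine scope/tape machinery defined above through the public tfjs-core API.
// Assumes @tensorflow/tfjs-core@3.19.0 plus a CPU backend registered purely so
// the kernels can execute; any registered backend would do.
import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-cpu';

const y = tf.tidy(() => {                  // startScope()/endScope(): intermediates are disposed
  const x = tf.tensor([1, 2, 3]);
  return tf.square(x);                     // only the returned tensor escapes the scope
});
y.print();
const df = tf.grad((x) => tf.square(x));   // forward pass is taped, then backpropagateGradients() runs
df(tf.tensor([1, 2, 3])).print();          // [2, 4, 6]
const sum = tf.engine().runKernel('Add', { a: tf.scalar(1), b: tf.scalar(2) }); // same dispatch path as add() above
sum.print();
console.log(tf.memory().numTensors);       // counters maintained by trackTensor()/disposeTensor()
// ---------------------------------------------------------------------------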
window.opera : \"\");\n if (!a) {\n const navAny = nav;\n return navAny.userAgentData && navAny.userAgentData.mobile;\n }\n return /(android|bb\\d+|meego).+mobile|avantgo|bada\\/|blackberry|blazer|compal|elaine|fennec|hiptop|iemobile|ip(hone|od)|iris|kindle|lge |maemo|midp|mmp|mobile.+firefox|netfront|opera m(ob|in)i|palm( os)?|phone|p(ixi|re)\\/|plucker|pocket|psp|series(4|6)0|symbian|treo|up\\.(browser|link)|vodafone|wap|windows ce|xda|xiino/i.test(a) || /1207|6310|6590|3gso|4thp|50[1-6]i|770s|802s|a wa|abac|ac(er|oo|s\\-)|ai(ko|rn)|al(av|ca|co)|amoi|an(ex|ny|yw)|aptu|ar(ch|go)|as(te|us)|attw|au(di|\\-m|r |s )|avan|be(ck|ll|nq)|bi(lb|rd)|bl(ac|az)|br(e|v)w|bumb|bw\\-(n|u)|c55\\/|capi|ccwa|cdm\\-|cell|chtm|cldc|cmd\\-|co(mp|nd)|craw|da(it|ll|ng)|dbte|dc\\-s|devi|dica|dmob|do(c|p)o|ds(12|\\-d)|el(49|ai)|em(l2|ul)|er(ic|k0)|esl8|ez([4-7]0|os|wa|ze)|fetc|fly(\\-|_)|g1 u|g560|gene|gf\\-5|g\\-mo|go(\\.w|od)|gr(ad|un)|haie|hcit|hd\\-(m|p|t)|hei\\-|hi(pt|ta)|hp( i|ip)|hs\\-c|ht(c(\\-| |_|a|g|p|s|t)|tp)|hu(aw|tc)|i\\-(20|go|ma)|i230|iac( |\\-|\\/)|ibro|idea|ig01|ikom|im1k|inno|ipaq|iris|ja(t|v)a|jbro|jemu|jigs|kddi|keji|kgt( |\\/)|klon|kpt |kwc\\-|kyo(c|k)|le(no|xi)|lg( g|\\/(k|l|u)|50|54|\\-[a-w])|libw|lynx|m1\\-w|m3ga|m50\\/|ma(te|ui|xo)|mc(01|21|ca)|m\\-cr|me(rc|ri)|mi(o8|oa|ts)|mmef|mo(01|02|bi|de|do|t(\\-| |o|v)|zz)|mt(50|p1|v )|mwbp|mywa|n10[0-2]|n20[2-3]|n30(0|2)|n50(0|2|5)|n7(0(0|1)|10)|ne((c|m)\\-|on|tf|wf|wg|wt)|nok(6|i)|nzph|o2im|op(ti|wv)|oran|owg1|p800|pan(a|d|t)|pdxg|pg(13|\\-([1-8]|c))|phil|pire|pl(ay|uc)|pn\\-2|po(ck|rt|se)|prox|psio|pt\\-g|qa\\-a|qc(07|12|21|32|60|\\-[2-7]|i\\-)|qtek|r380|r600|raks|rim9|ro(ve|zo)|s55\\/|sa(ge|ma|mm|ms|ny|va)|sc(01|h\\-|oo|p\\-)|sdk\\/|se(c(\\-|0|1)|47|mc|nd|ri)|sgh\\-|shar|sie(\\-|m)|sk\\-0|sl(45|id)|sm(al|ar|b3|it|t5)|so(ft|ny)|sp(01|h\\-|v\\-|v )|sy(01|mb)|t2(18|50)|t6(00|10|18)|ta(gt|lk)|tcl\\-|tdg\\-|tel(i|m)|tim\\-|t\\-mo|to(pl|sh)|ts(70|m\\-|m3|m5)|tx\\-9|up(\\.b|g1|si)|utst|v400|v750|veri|vi(rg|te)|vk(40|5[0-3]|\\-v)|vm40|voda|vulc|vx(52|53|60|61|70|80|81|83|85|98)|w3c(\\-| )|webc|whit|wi(g |nc|nw)|wmlb|wonu|x700|yas\\-|your|zeto|zte\\-/i.test(a.substr(0, 4));\n }\n return false;\n}\nfunction isBrowser() {\n return typeof window !== \"undefined\" && window.document != null || typeof WorkerGlobalScope !== \"undefined\";\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/flags.js\nvar ENV2 = env();\nENV2.registerFlag(\"DEBUG\", () => false, (debugValue) => {\n if (debugValue) {\n console.warn(\"Debugging mode is ON. The output of every math call will be downloaded to CPU and checked for NaNs. 
This significantly impacts performance.\");\n }\n});\nENV2.registerFlag(\"IS_BROWSER\", () => isBrowser());\nENV2.registerFlag(\"IS_NODE\", () => typeof process !== \"undefined\" && typeof process.versions !== \"undefined\" && typeof process.versions.node !== \"undefined\");\nENV2.registerFlag(\"IS_CHROME\", () => typeof navigator !== \"undefined\" && navigator != null && navigator.userAgent != null && /Chrome/.test(navigator.userAgent) && /Google Inc/.test(navigator.vendor));\nENV2.registerFlag(\"PROD\", () => false);\nENV2.registerFlag(\"TENSORLIKE_CHECK_SHAPE_CONSISTENCY\", () => ENV2.getBool(\"DEBUG\"));\nENV2.registerFlag(\"DEPRECATION_WARNINGS_ENABLED\", () => true);\nENV2.registerFlag(\"IS_TEST\", () => false);\nENV2.registerFlag(\"CHECK_COMPUTATION_FOR_ERRORS\", () => true);\nENV2.registerFlag(\"WRAP_TO_IMAGEBITMAP\", () => false);\nENV2.registerFlag(\"ENGINE_COMPILE_ONLY\", () => false);\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/tensor_util_env.js\nfunction inferShape(val, dtype) {\n let firstElem = val;\n if (isTypedArray(val)) {\n return dtype === \"string\" ? [] : [val.length];\n }\n if (!Array.isArray(val)) {\n return [];\n }\n const shape = [];\n while (Array.isArray(firstElem) || isTypedArray(firstElem) && dtype !== \"string\") {\n shape.push(firstElem.length);\n firstElem = firstElem[0];\n }\n if (Array.isArray(val) && env().getBool(\"TENSORLIKE_CHECK_SHAPE_CONSISTENCY\")) {\n deepAssertShapeConsistency(val, shape, []);\n }\n return shape;\n}\nfunction deepAssertShapeConsistency(val, shape, indices) {\n indices = indices || [];\n if (!Array.isArray(val) && !isTypedArray(val)) {\n assert(shape.length === 0, () => `Element arr[${indices.join(\"][\")}] is a primitive, but should be an array/TypedArray of ${shape[0]} elements`);\n return;\n }\n assert(shape.length > 0, () => `Element arr[${indices.join(\"][\")}] should be a primitive, but is an array of ${val.length} elements`);\n assert(val.length === shape[0], () => `Element arr[${indices.join(\"][\")}] should have ${shape[0]} elements, but has ${val.length} elements`);\n const subShape = shape.slice(1);\n for (let i2 = 0; i2 < val.length; ++i2) {\n deepAssertShapeConsistency(val[i2], subShape, indices.concat(i2));\n }\n}\nfunction assertDtype(expectedDtype, actualDType, argName, functionName) {\n if (expectedDtype === \"string_or_numeric\") {\n return;\n }\n if (expectedDtype == null) {\n throw new Error(`Expected dtype cannot be null.`);\n }\n if (expectedDtype !== \"numeric\" && expectedDtype !== actualDType || expectedDtype === \"numeric\" && actualDType === \"string\") {\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be ${expectedDtype} tensor, but got ${actualDType} tensor`);\n }\n}\nfunction convertToTensor(x, argName, functionName, parseAsDtype = \"numeric\") {\n if (x instanceof Tensor) {\n assertDtype(parseAsDtype, x.dtype, argName, functionName);\n return x;\n }\n let inferredDtype = inferDtype(x);\n if (inferredDtype !== \"string\" && [\"bool\", \"int32\", \"float32\"].indexOf(parseAsDtype) >= 0) {\n inferredDtype = parseAsDtype;\n }\n assertDtype(parseAsDtype, inferredDtype, argName, functionName);\n if (x == null || !isTypedArray(x) && !Array.isArray(x) && typeof x !== \"number\" && typeof x !== \"boolean\" && typeof x !== \"string\") {\n const type = x == null ? 
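// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: reading and
// overriding the flags registered above via the public env() API.
import * as tf from '@tensorflow/tfjs-core';

console.log(tf.env().getBool('IS_BROWSER'), tf.env().getBool('IS_NODE'));
tf.env().set('DEBUG', true);   // enables the per-kernel NaN checks warned about above
tf.env().set('DEBUG', false);
// ---------------------------------------------------------------------------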
\"null\" : x.constructor.name;\n throw new Error(`Argument '${argName}' passed to '${functionName}' must be a Tensor or TensorLike, but got '${type}'`);\n }\n const inferredShape = inferShape(x, inferredDtype);\n if (!isTypedArray(x) && !Array.isArray(x)) {\n x = [x];\n }\n const skipTypedArray = true;\n const values = inferredDtype !== \"string\" ? toTypedArray(x, inferredDtype) : flatten(x, [], skipTypedArray);\n return ENGINE.makeTensor(values, inferredShape, inferredDtype);\n}\nfunction convertToTensorArray(arg, argName, functionName, parseAsDtype = \"numeric\") {\n if (!Array.isArray(arg)) {\n throw new Error(`Argument ${argName} passed to ${functionName} must be a \\`Tensor[]\\` or \\`TensorLike[]\\``);\n }\n const tensors = arg;\n return tensors.map((t2, i2) => convertToTensor(t2, `${argName}[${i2}]`, functionName, parseAsDtype));\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/operation.js\nvar OP_SCOPE_SUFFIX = \"__op\";\nfunction op(f) {\n const keys = Object.keys(f);\n if (keys.length !== 1) {\n throw new Error(`Please provide an object with a single key (operation name) mapping to a function. Got an object with ${keys.length} keys.`);\n }\n let opName = keys[0];\n const fn = f[opName];\n if (opName.endsWith(\"_\")) {\n opName = opName.substring(0, opName.length - 1);\n }\n opName = opName + OP_SCOPE_SUFFIX;\n const f2 = (...args) => {\n ENGINE.startScope(opName);\n try {\n const result = fn(...args);\n if (isPromise(result)) {\n console.error(\"Cannot return a Promise inside of tidy.\");\n }\n ENGINE.endScope(result);\n return result;\n } catch (ex) {\n ENGINE.endScope(null);\n throw ex;\n }\n };\n Object.defineProperty(f2, \"name\", { value: opName, configurable: true });\n return f2;\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/complex.js\nfunction complex_(real5, imag5) {\n const $real = convertToTensor(real5, \"real\", \"complex\");\n const $imag = convertToTensor(imag5, \"imag\", \"complex\");\n assertShapesMatch($real.shape, $imag.shape, `real and imag shapes, ${$real.shape} and ${$imag.shape}, must match in call to tf.complex().`);\n const inputs = { real: $real, imag: $imag };\n return ENGINE.runKernel(Complex, inputs);\n}\nvar complex = op({ complex_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/tensor_ops_util.js\nfunction makeTensor(values, shape, inferredShape, dtype) {\n if (dtype == null) {\n dtype = inferDtype(values);\n }\n if (dtype === \"complex64\") {\n throw new Error(`Cannot construct a complex64 tensor directly. Please use tf.complex(real, imag).`);\n }\n if (!isTypedArray(values) && !Array.isArray(values) && typeof values !== \"number\" && typeof values !== \"boolean\" && typeof values !== \"string\") {\n throw new Error(\"values passed to tensor(values) must be a number/boolean/string or an array of numbers/booleans/strings, or a TypedArray\");\n }\n if (shape != null) {\n assertNonNegativeIntegerDimensions(shape);\n const providedSize = sizeFromShape(shape);\n const inferredSize = sizeFromShape(inferredShape);\n assert(providedSize === inferredSize, () => `Based on the provided shape, [${shape}], the tensor should have ${providedSize} values but has ${inferredSize}`);\n for (let i2 = 0; i2 < inferredShape.length; ++i2) {\n const inferred = inferredShape[i2];\n const flatDimsDontMatch = i2 === inferredShape.length - 1 ? 
inferred !== sizeFromShape(shape.slice(i2)) : true;\n assert(inferredShape[i2] === shape[i2] || !flatDimsDontMatch, () => `Error creating a new Tensor. Inferred shape (${inferredShape}) does not match the provided shape (${shape}). `);\n }\n }\n if (!isTypedArray(values) && !Array.isArray(values)) {\n values = [values];\n }\n shape = shape || inferredShape;\n values = dtype !== \"string\" ? toTypedArray(values, dtype) : flatten(values, [], true);\n return ENGINE.makeTensor(values, shape, dtype);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/tensor.js\nfunction tensor(values, shape, dtype) {\n const inferredShape = inferShape(values, dtype);\n return makeTensor(values, shape, inferredShape, dtype);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/types.js\nvar DTYPE_VALUE_SIZE_MAP = {\n \"float32\": 4,\n \"float16\": 2,\n \"int32\": 4,\n \"uint16\": 2,\n \"uint8\": 1,\n \"bool\": 1,\n \"complex64\": 8\n};\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/io_utils.js\nvar NUM_BYTES_STRING_LENGTH = 4;\nasync function encodeWeights(tensors, group) {\n const specs = [];\n const dataPromises = [];\n const names = Array.isArray(tensors) ? tensors.map((tensor2) => tensor2.name) : Object.keys(tensors);\n for (let i2 = 0; i2 < names.length; ++i2) {\n const name = names[i2];\n const t2 = Array.isArray(tensors) ? tensors[i2].tensor : tensors[name];\n if (t2.dtype !== \"float32\" && t2.dtype !== \"int32\" && t2.dtype !== \"bool\" && t2.dtype !== \"string\" && t2.dtype !== \"complex64\") {\n throw new Error(`Unsupported dtype in weight '${name}': ${t2.dtype}`);\n }\n const spec = { name, shape: t2.shape, dtype: t2.dtype };\n if (t2.dtype === \"string\") {\n const utf8bytes = new Promise(async (resolve) => {\n const vals = await t2.bytes();\n const totalNumBytes = vals.reduce((p2, c) => p2 + c.length, 0) + NUM_BYTES_STRING_LENGTH * vals.length;\n const bytes = new Uint8Array(totalNumBytes);\n let offset = 0;\n for (let i3 = 0; i3 < vals.length; i3++) {\n const val = vals[i3];\n const bytesOfLength = new Uint8Array(new Uint32Array([val.length]).buffer);\n bytes.set(bytesOfLength, offset);\n offset += NUM_BYTES_STRING_LENGTH;\n bytes.set(val, offset);\n offset += val.length;\n }\n resolve(bytes);\n });\n dataPromises.push(utf8bytes);\n } else {\n dataPromises.push(t2.data());\n }\n if (group != null) {\n spec.group = group;\n }\n specs.push(spec);\n }\n const tensorValues = await Promise.all(dataPromises);\n return { data: concatenateTypedArrays(tensorValues), specs };\n}\nfunction decodeWeights(buffer2, specs) {\n const out = {};\n let float16Decode;\n let offset = 0;\n for (const spec of specs) {\n const name = spec.name;\n const dtype = spec.dtype;\n const shape = spec.shape;\n const size = sizeFromShape(shape);\n let values;\n if (\"quantization\" in spec) {\n const quantization = spec.quantization;\n if (quantization.dtype === \"uint8\" || quantization.dtype === \"uint16\") {\n if (!(\"min\" in quantization && \"scale\" in quantization)) {\n throw new Error(`Weight ${spec.name} with quantization ${quantization.dtype} doesn't have corresponding metadata min and scale.`);\n }\n } else if (quantization.dtype === \"float16\") {\n if (dtype !== \"float32\") {\n throw new Error(`Weight ${spec.name} is quantized with ${quantization.dtype} which only supports weights of type float32 not ${dtype}.`);\n }\n } else {\n throw new Error(`Weight ${spec.name} has 
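// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: tensor() with the
// inferShape()/convertToTensor() helpers defined above. Assumes tfjs-core@3.19.0
// with a CPU backend registered so the values can be written to a backend.
import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-cpu';

const a = tf.tensor([[1, 2], [3, 4]]);             // shape [2,2] and dtype 'float32' are inferred
const s = tf.tensor(['yes', 'no'], [2], 'string'); // string values are flattened, not written to a TypedArray
const z = tf.complex([1, 2], [3, 4]);              // complex64 tensors must be built via tf.complex()
console.log(a.shape, s.dtype, z.dtype);            // [2, 2] 'string' 'complex64'
// ---------------------------------------------------------------------------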
unknown quantization dtype ${quantization.dtype}. Supported quantization dtypes are: 'uint8', 'uint16', and 'float16'.`);\n }\n const quantizationSizeFactor = DTYPE_VALUE_SIZE_MAP[quantization.dtype];\n const byteBuffer = buffer2.slice(offset, offset + size * quantizationSizeFactor);\n const quantizedArray = quantization.dtype === \"uint8\" ? new Uint8Array(byteBuffer) : new Uint16Array(byteBuffer);\n if (dtype === \"float32\") {\n if (quantization.dtype === \"uint8\" || quantization.dtype === \"uint16\") {\n values = new Float32Array(quantizedArray.length);\n for (let i2 = 0; i2 < quantizedArray.length; i2++) {\n const v = quantizedArray[i2];\n values[i2] = v * quantization.scale + quantization.min;\n }\n } else if (quantization.dtype === \"float16\") {\n if (float16Decode === void 0) {\n float16Decode = getFloat16Decoder();\n }\n values = float16Decode(quantizedArray);\n } else {\n throw new Error(`Unsupported quantization type ${quantization.dtype} for weight type float32.`);\n }\n } else if (dtype === \"int32\") {\n if (quantization.dtype !== \"uint8\" && quantization.dtype !== \"uint16\") {\n throw new Error(`Unsupported quantization type ${quantization.dtype} for weight type int32.`);\n }\n values = new Int32Array(quantizedArray.length);\n for (let i2 = 0; i2 < quantizedArray.length; i2++) {\n const v = quantizedArray[i2];\n values[i2] = Math.round(v * quantization.scale + quantization.min);\n }\n } else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * quantizationSizeFactor;\n } else if (dtype === \"string\") {\n const size2 = sizeFromShape(spec.shape);\n values = [];\n for (let i2 = 0; i2 < size2; i2++) {\n const byteLength = new Uint32Array(buffer2.slice(offset, offset + NUM_BYTES_STRING_LENGTH))[0];\n offset += NUM_BYTES_STRING_LENGTH;\n const bytes = new Uint8Array(buffer2.slice(offset, offset + byteLength));\n values.push(bytes);\n offset += byteLength;\n }\n } else {\n const dtypeFactor = DTYPE_VALUE_SIZE_MAP[dtype];\n const byteBuffer = buffer2.slice(offset, offset + size * dtypeFactor);\n if (dtype === \"float32\") {\n values = new Float32Array(byteBuffer);\n } else if (dtype === \"int32\") {\n values = new Int32Array(byteBuffer);\n } else if (dtype === \"bool\") {\n values = new Uint8Array(byteBuffer);\n } else if (dtype === \"complex64\") {\n values = new Float32Array(byteBuffer);\n const real5 = new Float32Array(values.length / 2);\n const image2 = new Float32Array(values.length / 2);\n for (let i2 = 0; i2 < real5.length; i2++) {\n real5[i2] = values[i2 * 2];\n image2[i2] = values[i2 * 2 + 1];\n }\n const realTensor = tensor(real5, shape, \"float32\");\n const imageTensor = tensor(image2, shape, \"float32\");\n out[name] = complex(realTensor, imageTensor);\n realTensor.dispose();\n imageTensor.dispose();\n } else {\n throw new Error(`Unsupported dtype in weight '${name}': ${dtype}`);\n }\n offset += size * dtypeFactor;\n }\n if (dtype !== \"complex64\") {\n out[name] = tensor(values, shape, dtype);\n }\n }\n return out;\n}\nfunction concatenateTypedArrays(xs) {\n if (xs === null) {\n throw new Error(`Invalid input value: ${JSON.stringify(xs)}`);\n }\n let totalByteLength = 0;\n const normalizedXs = [];\n xs.forEach((x) => {\n totalByteLength += x.byteLength;\n normalizedXs.push(x.byteLength === x.buffer.byteLength ? 
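// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: round-tripping a
// named weight map through encodeWeights()/decodeWeights() shown above. The weight
// names are made up for the example; assumes tfjs-core@3.19.0 with a CPU backend.
import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-cpu';

(async () => {
  const weights = { kernel: tf.tensor([[1, 2], [3, 4]]), bias: tf.tensor([0, 0], [2], 'int32') };
  const { data, specs } = await tf.io.encodeWeights(weights); // ArrayBuffer + name/shape/dtype specs
  const decoded = tf.io.decodeWeights(data, specs);           // NamedTensorMap rebuilt from the buffer
  decoded['kernel'].print();
})();
// ---------------------------------------------------------------------------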
x : new x.constructor(x));\n if (!(x instanceof Float32Array || x instanceof Int32Array || x instanceof Uint8Array)) {\n throw new Error(`Unsupported TypedArray subtype: ${x.constructor.name}`);\n }\n });\n const y = new Uint8Array(totalByteLength);\n let offset = 0;\n normalizedXs.forEach((x) => {\n y.set(new Uint8Array(x.buffer), offset);\n offset += x.byteLength;\n });\n return y.buffer;\n}\nvar useNodeBuffer = typeof Buffer !== \"undefined\" && (typeof Blob === \"undefined\" || typeof atob === \"undefined\" || typeof btoa === \"undefined\");\nfunction stringByteLength(str) {\n if (useNodeBuffer) {\n return Buffer.byteLength(str);\n }\n return new Blob([str]).size;\n}\nfunction arrayBufferToBase64String(buffer2) {\n if (useNodeBuffer) {\n return Buffer.from(buffer2).toString(\"base64\");\n }\n const buf = new Uint8Array(buffer2);\n let s2 = \"\";\n for (let i2 = 0, l3 = buf.length; i2 < l3; i2++) {\n s2 += String.fromCharCode(buf[i2]);\n }\n return btoa(s2);\n}\nfunction base64StringToArrayBuffer(str) {\n if (useNodeBuffer) {\n const buf = Buffer.from(str, \"base64\");\n return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n }\n const s2 = atob(str);\n const buffer2 = new Uint8Array(s2.length);\n for (let i2 = 0; i2 < s2.length; ++i2) {\n buffer2.set([s2.charCodeAt(i2)], i2);\n }\n return buffer2.buffer;\n}\nfunction concatenateArrayBuffers(buffers) {\n if (buffers.length === 1) {\n return buffers[0];\n }\n let totalByteLength = 0;\n buffers.forEach((buffer2) => {\n totalByteLength += buffer2.byteLength;\n });\n const temp = new Uint8Array(totalByteLength);\n let offset = 0;\n buffers.forEach((buffer2) => {\n temp.set(new Uint8Array(buffer2), offset);\n offset += buffer2.byteLength;\n });\n return temp.buffer;\n}\nfunction basename(path) {\n const SEPARATOR = \"/\";\n path = path.trim();\n while (path.endsWith(SEPARATOR)) {\n path = path.slice(0, path.length - 1);\n }\n const items = path.split(SEPARATOR);\n return items[items.length - 1];\n}\nfunction getModelJSONForModelArtifacts(artifacts, manifest) {\n const result = {\n modelTopology: artifacts.modelTopology,\n format: artifacts.format,\n generatedBy: artifacts.generatedBy,\n convertedBy: artifacts.convertedBy,\n weightsManifest: manifest\n };\n if (artifacts.signature != null) {\n result.signature = artifacts.signature;\n }\n if (artifacts.userDefinedMetadata != null) {\n result.userDefinedMetadata = artifacts.userDefinedMetadata;\n }\n if (artifacts.modelInitializer != null) {\n result.modelInitializer = artifacts.modelInitializer;\n }\n if (artifacts.trainingConfig != null) {\n result.trainingConfig = artifacts.trainingConfig;\n }\n return result;\n}\nasync function getModelArtifactsForJSON(modelJSON, loadWeights2) {\n const modelArtifacts = {\n modelTopology: modelJSON.modelTopology,\n format: modelJSON.format,\n generatedBy: modelJSON.generatedBy,\n convertedBy: modelJSON.convertedBy\n };\n if (modelJSON.trainingConfig != null) {\n modelArtifacts.trainingConfig = modelJSON.trainingConfig;\n }\n if (modelJSON.weightsManifest != null) {\n const [weightSpecs, weightData] = await loadWeights2(modelJSON.weightsManifest);\n modelArtifacts.weightSpecs = weightSpecs;\n modelArtifacts.weightData = weightData;\n }\n if (modelJSON.signature != null) {\n modelArtifacts.signature = modelJSON.signature;\n }\n if (modelJSON.userDefinedMetadata != null) {\n modelArtifacts.userDefinedMetadata = modelJSON.userDefinedMetadata;\n }\n if (modelJSON.modelInitializer != null) {\n modelArtifacts.modelInitializer = 
modelJSON.modelInitializer;\n }\n return modelArtifacts;\n}\nfunction getModelArtifactsInfoForJSON(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\"Expected JSON model topology, received ArrayBuffer.\");\n }\n return {\n dateSaved: new Date(),\n modelTopologyType: \"JSON\",\n modelTopologyBytes: modelArtifacts.modelTopology == null ? 0 : stringByteLength(JSON.stringify(modelArtifacts.modelTopology)),\n weightSpecsBytes: modelArtifacts.weightSpecs == null ? 0 : stringByteLength(JSON.stringify(modelArtifacts.weightSpecs)),\n weightDataBytes: modelArtifacts.weightData == null ? 0 : modelArtifacts.weightData.byteLength\n };\n}\nfunction computeFloat16MantisaTable() {\n const convertMantissa = (i2) => {\n let m = i2 << 13;\n let e2 = 0;\n while ((m & 8388608) === 0) {\n e2 -= 8388608;\n m <<= 1;\n }\n m &= ~8388608;\n e2 += 947912704;\n return m | e2;\n };\n const mantisaTable = new Uint32Array(2048);\n mantisaTable[0] = 0;\n for (let i2 = 1; i2 < 1024; i2++) {\n mantisaTable[i2] = convertMantissa(i2);\n }\n for (let i2 = 1024; i2 < 2048; i2++) {\n mantisaTable[i2] = 939524096 + (i2 - 1024 << 13);\n }\n return mantisaTable;\n}\nfunction computeFloat16ExponentTable() {\n const exponentTable = new Uint32Array(64);\n exponentTable[0] = 0;\n exponentTable[31] = 1199570944;\n exponentTable[32] = 2147483648;\n exponentTable[63] = 3347054592;\n for (let i2 = 1; i2 < 31; i2++) {\n exponentTable[i2] = i2 << 23;\n }\n for (let i2 = 33; i2 < 63; i2++) {\n exponentTable[i2] = 2147483648 + (i2 - 32 << 23);\n }\n return exponentTable;\n}\nfunction computeFloat16OffsetTable() {\n const offsetTable = new Uint32Array(64);\n for (let i2 = 0; i2 < 64; i2++) {\n offsetTable[i2] = 1024;\n }\n offsetTable[0] = offsetTable[32] = 0;\n return offsetTable;\n}\nfunction getFloat16Decoder() {\n const mantisaTable = computeFloat16MantisaTable();\n const exponentTable = computeFloat16ExponentTable();\n const offsetTable = computeFloat16OffsetTable();\n return (quantizedArray) => {\n const buffer2 = new ArrayBuffer(4 * quantizedArray.length);\n const bufferUint32View = new Uint32Array(buffer2);\n for (let index = 0; index < quantizedArray.length; index++) {\n const float16Bits = quantizedArray[index];\n const float32Bits = mantisaTable[offsetTable[float16Bits >> 10] + (float16Bits & 1023)] + exponentTable[float16Bits >> 10];\n bufferUint32View[index] = float32Bits;\n }\n return new Float32Array(buffer2);\n };\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/router_registry.js\nvar IORouterRegistry = class {\n constructor() {\n this.saveRouters = [];\n this.loadRouters = [];\n }\n static getInstance() {\n if (IORouterRegistry.instance == null) {\n IORouterRegistry.instance = new IORouterRegistry();\n }\n return IORouterRegistry.instance;\n }\n static registerSaveRouter(saveRouter) {\n IORouterRegistry.getInstance().saveRouters.push(saveRouter);\n }\n static registerLoadRouter(loadRouter) {\n IORouterRegistry.getInstance().loadRouters.push(loadRouter);\n }\n static getSaveHandlers(url) {\n return IORouterRegistry.getHandlers(url, \"save\");\n }\n static getLoadHandlers(url, loadOptions) {\n return IORouterRegistry.getHandlers(url, \"load\", loadOptions);\n }\n static getHandlers(url, handlerType, loadOptions) {\n const validHandlers = [];\n const routers = handlerType === \"load\" ? 
IORouterRegistry.getInstance().loadRouters : IORouterRegistry.getInstance().saveRouters;\n routers.forEach((router) => {\n const handler = router(url, loadOptions);\n if (handler !== null) {\n validHandlers.push(handler);\n }\n });\n return validHandlers;\n }\n};\nvar registerSaveRouter = (loudRouter) => IORouterRegistry.registerSaveRouter(loudRouter);\nvar registerLoadRouter = (loudRouter) => IORouterRegistry.registerLoadRouter(loudRouter);\nvar getSaveHandlers = (url) => IORouterRegistry.getSaveHandlers(url);\nvar getLoadHandlers = (url, loadOptions) => IORouterRegistry.getLoadHandlers(url, loadOptions);\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/indexed_db.js\nvar DATABASE_NAME = \"tensorflowjs\";\nvar DATABASE_VERSION = 1;\nvar MODEL_STORE_NAME = \"models_store\";\nvar INFO_STORE_NAME = \"model_info_store\";\nfunction getIndexedDBFactory() {\n if (!env().getBool(\"IS_BROWSER\")) {\n throw new Error(\"Failed to obtain IndexedDB factory because the current environmentis not a web browser.\");\n }\n const theWindow = typeof window === \"undefined\" ? self : window;\n const factory = theWindow.indexedDB || theWindow.mozIndexedDB || theWindow.webkitIndexedDB || theWindow.msIndexedDB || theWindow.shimIndexedDB;\n if (factory == null) {\n throw new Error(\"The current browser does not appear to support IndexedDB.\");\n }\n return factory;\n}\nfunction setUpDatabase(openRequest) {\n const db = openRequest.result;\n db.createObjectStore(MODEL_STORE_NAME, { keyPath: \"modelPath\" });\n db.createObjectStore(INFO_STORE_NAME, { keyPath: \"modelPath\" });\n}\nvar BrowserIndexedDB = class {\n constructor(modelPath) {\n this.indexedDB = getIndexedDBFactory();\n if (modelPath == null || !modelPath) {\n throw new Error(\"For IndexedDB, modelPath must not be null, undefined or empty.\");\n }\n this.modelPath = modelPath;\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\"BrowserLocalStorage.save() does not support saving model topology in binary formats yet.\");\n }\n return this.databaseAction(this.modelPath, modelArtifacts);\n }\n async load() {\n return this.databaseAction(this.modelPath);\n }\n databaseAction(modelPath, modelArtifacts) {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n if (modelArtifacts == null) {\n const modelTx = db.transaction(MODEL_STORE_NAME, \"readonly\");\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const getRequest = modelStore.get(this.modelPath);\n getRequest.onsuccess = () => {\n if (getRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${this.modelPath}' in IndexedDB.`));\n } else {\n resolve(getRequest.result.modelArtifacts);\n }\n };\n getRequest.onerror = (error) => {\n db.close();\n return reject(getRequest.error);\n };\n modelTx.oncomplete = () => db.close();\n } else {\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n const infoTx = db.transaction(INFO_STORE_NAME, \"readwrite\");\n let infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const putInfoRequest = infoStore.put({ modelPath: this.modelPath, modelArtifactsInfo });\n let modelTx;\n putInfoRequest.onsuccess = () => {\n modelTx = db.transaction(MODEL_STORE_NAME, \"readwrite\");\n const modelStore = 
modelTx.objectStore(MODEL_STORE_NAME);\n const putModelRequest = modelStore.put({\n modelPath: this.modelPath,\n modelArtifacts,\n modelArtifactsInfo\n });\n putModelRequest.onsuccess = () => resolve({ modelArtifactsInfo });\n putModelRequest.onerror = (error) => {\n infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const deleteInfoRequest = infoStore.delete(this.modelPath);\n deleteInfoRequest.onsuccess = () => {\n db.close();\n return reject(putModelRequest.error);\n };\n deleteInfoRequest.onerror = (error2) => {\n db.close();\n return reject(putModelRequest.error);\n };\n };\n };\n putInfoRequest.onerror = (error) => {\n db.close();\n return reject(putInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n } else {\n modelTx.oncomplete = () => db.close();\n }\n };\n }\n };\n openRequest.onerror = (error) => reject(openRequest.error);\n });\n }\n};\nBrowserIndexedDB.URL_SCHEME = \"indexeddb://\";\nvar indexedDBRouter = (url) => {\n if (!env().getBool(\"IS_BROWSER\")) {\n return null;\n } else {\n if (!Array.isArray(url) && url.startsWith(BrowserIndexedDB.URL_SCHEME)) {\n return browserIndexedDB(url.slice(BrowserIndexedDB.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(indexedDBRouter);\nIORouterRegistry.registerLoadRouter(indexedDBRouter);\nfunction browserIndexedDB(modelPath) {\n return new BrowserIndexedDB(modelPath);\n}\nfunction maybeStripScheme(key) {\n return key.startsWith(BrowserIndexedDB.URL_SCHEME) ? key.slice(BrowserIndexedDB.URL_SCHEME.length) : key;\n}\nvar BrowserIndexedDBManager = class {\n constructor() {\n this.indexedDB = getIndexedDBFactory();\n }\n async listModels() {\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const tx = db.transaction(INFO_STORE_NAME, \"readonly\");\n const store = tx.objectStore(INFO_STORE_NAME);\n const getAllInfoRequest = store.getAll();\n getAllInfoRequest.onsuccess = () => {\n const out = {};\n for (const item of getAllInfoRequest.result) {\n out[item.modelPath] = item.modelArtifactsInfo;\n }\n resolve(out);\n };\n getAllInfoRequest.onerror = (error) => {\n db.close();\n return reject(getAllInfoRequest.error);\n };\n tx.oncomplete = () => db.close();\n };\n openRequest.onerror = (error) => reject(openRequest.error);\n });\n }\n async removeModel(path) {\n path = maybeStripScheme(path);\n return new Promise((resolve, reject) => {\n const openRequest = this.indexedDB.open(DATABASE_NAME, DATABASE_VERSION);\n openRequest.onupgradeneeded = () => setUpDatabase(openRequest);\n openRequest.onsuccess = () => {\n const db = openRequest.result;\n const infoTx = db.transaction(INFO_STORE_NAME, \"readwrite\");\n const infoStore = infoTx.objectStore(INFO_STORE_NAME);\n const getInfoRequest = infoStore.get(path);\n let modelTx;\n getInfoRequest.onsuccess = () => {\n if (getInfoRequest.result == null) {\n db.close();\n return reject(new Error(`Cannot find model with path '${path}' in IndexedDB.`));\n } else {\n const deleteInfoRequest = infoStore.delete(path);\n const deleteModelData = () => {\n modelTx = db.transaction(MODEL_STORE_NAME, \"readwrite\");\n const modelStore = modelTx.objectStore(MODEL_STORE_NAME);\n const deleteModelRequest = modelStore.delete(path);\n deleteModelRequest.onsuccess = () => resolve(getInfoRequest.result.modelArtifactsInfo);\n 
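// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: using the
// indexeddb:// scheme registered above through the IORouterRegistry (browser only).
// The model name and the stub artifacts object are hypothetical placeholders.
import * as tf from '@tensorflow/tfjs-core';

(async () => {
  const stubArtifacts = { modelTopology: { placeholder: true }, weightSpecs: [], weightData: new ArrayBuffer(0) };
  const [saveHandler] = tf.io.getSaveHandlers('indexeddb://demo-model'); // resolved by indexedDBRouter above
  await saveHandler.save(stubArtifacts);
  const [loadHandler] = tf.io.getLoadHandlers('indexeddb://demo-model');
  const loaded = await loadHandler.load();
  console.log(Object.keys(loaded));
})();
// ---------------------------------------------------------------------------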
deleteModelRequest.onerror = (error) => reject(getInfoRequest.error);\n };\n deleteInfoRequest.onsuccess = deleteModelData;\n deleteInfoRequest.onerror = (error) => {\n deleteModelData();\n db.close();\n return reject(getInfoRequest.error);\n };\n }\n };\n getInfoRequest.onerror = (error) => {\n db.close();\n return reject(getInfoRequest.error);\n };\n infoTx.oncomplete = () => {\n if (modelTx == null) {\n db.close();\n } else {\n modelTx.oncomplete = () => db.close();\n }\n };\n };\n openRequest.onerror = (error) => reject(openRequest.error);\n });\n }\n};\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/local_storage.js\nvar PATH_SEPARATOR = \"/\";\nvar PATH_PREFIX = \"tensorflowjs_models\";\nvar INFO_SUFFIX = \"info\";\nvar MODEL_TOPOLOGY_SUFFIX = \"model_topology\";\nvar WEIGHT_SPECS_SUFFIX = \"weight_specs\";\nvar WEIGHT_DATA_SUFFIX = \"weight_data\";\nvar MODEL_METADATA_SUFFIX = \"model_metadata\";\nfunction getModelKeys(path) {\n return {\n info: [PATH_PREFIX, path, INFO_SUFFIX].join(PATH_SEPARATOR),\n topology: [PATH_PREFIX, path, MODEL_TOPOLOGY_SUFFIX].join(PATH_SEPARATOR),\n weightSpecs: [PATH_PREFIX, path, WEIGHT_SPECS_SUFFIX].join(PATH_SEPARATOR),\n weightData: [PATH_PREFIX, path, WEIGHT_DATA_SUFFIX].join(PATH_SEPARATOR),\n modelMetadata: [PATH_PREFIX, path, MODEL_METADATA_SUFFIX].join(PATH_SEPARATOR)\n };\n}\nfunction removeItems(keys) {\n for (const key of Object.values(keys)) {\n window.localStorage.removeItem(key);\n }\n}\nfunction getModelPathFromKey(key) {\n const items = key.split(PATH_SEPARATOR);\n if (items.length < 3) {\n throw new Error(`Invalid key format: ${key}`);\n }\n return items.slice(1, items.length - 1).join(PATH_SEPARATOR);\n}\nfunction maybeStripScheme2(key) {\n return key.startsWith(BrowserLocalStorage.URL_SCHEME) ? key.slice(BrowserLocalStorage.URL_SCHEME.length) : key;\n}\nvar BrowserLocalStorage = class {\n constructor(modelPath) {\n if (!env().getBool(\"IS_BROWSER\") || typeof window === \"undefined\" || typeof window.localStorage === \"undefined\") {\n throw new Error(\"The current environment does not support local storage.\");\n }\n this.LS = window.localStorage;\n if (modelPath == null || !modelPath) {\n throw new Error(\"For local storage, modelPath must not be null, undefined or empty.\");\n }\n this.modelPath = modelPath;\n this.keys = getModelKeys(this.modelPath);\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\"BrowserLocalStorage.save() does not support saving model topology in binary formats yet.\");\n } else {\n const topology = JSON.stringify(modelArtifacts.modelTopology);\n const weightSpecs = JSON.stringify(modelArtifacts.weightSpecs);\n const modelArtifactsInfo = getModelArtifactsInfoForJSON(modelArtifacts);\n try {\n this.LS.setItem(this.keys.info, JSON.stringify(modelArtifactsInfo));\n this.LS.setItem(this.keys.topology, topology);\n this.LS.setItem(this.keys.weightSpecs, weightSpecs);\n this.LS.setItem(this.keys.weightData, arrayBufferToBase64String(modelArtifacts.weightData));\n const metadata = {\n format: modelArtifacts.format,\n generatedBy: modelArtifacts.generatedBy,\n convertedBy: modelArtifacts.convertedBy,\n signature: modelArtifacts.signature != null ? modelArtifacts.signature : void 0,\n userDefinedMetadata: modelArtifacts.userDefinedMetadata != null ? modelArtifacts.userDefinedMetadata : void 0,\n modelInitializer: modelArtifacts.modelInitializer != null ? 
modelArtifacts.modelInitializer : void 0,\n trainingConfig: modelArtifacts.trainingConfig != null ? modelArtifacts.trainingConfig : void 0\n };\n this.LS.setItem(this.keys.modelMetadata, JSON.stringify(metadata));\n return { modelArtifactsInfo };\n } catch (err) {\n removeItems(this.keys);\n throw new Error(`Failed to save model '${this.modelPath}' to local storage: size quota being exceeded is a possible cause of this failure: modelTopologyBytes=${modelArtifactsInfo.modelTopologyBytes}, weightSpecsBytes=${modelArtifactsInfo.weightSpecsBytes}, weightDataBytes=${modelArtifactsInfo.weightDataBytes}.`);\n }\n }\n }\n async load() {\n const info = JSON.parse(this.LS.getItem(this.keys.info));\n if (info == null) {\n throw new Error(`In local storage, there is no model with name '${this.modelPath}'`);\n }\n if (info.modelTopologyType !== \"JSON\") {\n throw new Error(\"BrowserLocalStorage does not support loading non-JSON model topology yet.\");\n }\n const out = {};\n const topology = JSON.parse(this.LS.getItem(this.keys.topology));\n if (topology == null) {\n throw new Error(`In local storage, the topology of model '${this.modelPath}' is missing.`);\n }\n out.modelTopology = topology;\n const weightSpecs = JSON.parse(this.LS.getItem(this.keys.weightSpecs));\n if (weightSpecs == null) {\n throw new Error(`In local storage, the weight specs of model '${this.modelPath}' are missing.`);\n }\n out.weightSpecs = weightSpecs;\n const metadataString = this.LS.getItem(this.keys.modelMetadata);\n if (metadataString != null) {\n const metadata = JSON.parse(metadataString);\n out.format = metadata.format;\n out.generatedBy = metadata.generatedBy;\n out.convertedBy = metadata.convertedBy;\n if (metadata.signature != null) {\n out.signature = metadata.signature;\n }\n if (metadata.userDefinedMetadata != null) {\n out.userDefinedMetadata = metadata.userDefinedMetadata;\n }\n if (metadata.modelInitializer != null) {\n out.modelInitializer = metadata.modelInitializer;\n }\n if (metadata.trainingConfig != null) {\n out.trainingConfig = metadata.trainingConfig;\n }\n }\n const weightDataBase64 = this.LS.getItem(this.keys.weightData);\n if (weightDataBase64 == null) {\n throw new Error(`In local storage, the binary weight values of model '${this.modelPath}' are missing.`);\n }\n out.weightData = base64StringToArrayBuffer(weightDataBase64);\n return out;\n }\n};\nBrowserLocalStorage.URL_SCHEME = \"localstorage://\";\nvar localStorageRouter = (url) => {\n if (!env().getBool(\"IS_BROWSER\")) {\n return null;\n } else {\n if (!Array.isArray(url) && url.startsWith(BrowserLocalStorage.URL_SCHEME)) {\n return browserLocalStorage(url.slice(BrowserLocalStorage.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(localStorageRouter);\nIORouterRegistry.registerLoadRouter(localStorageRouter);\nfunction browserLocalStorage(modelPath) {\n return new BrowserLocalStorage(modelPath);\n}\nvar BrowserLocalStorageManager = class {\n constructor() {\n assert(env().getBool(\"IS_BROWSER\"), () => \"Current environment is not a web browser\");\n assert(typeof window === \"undefined\" || typeof window.localStorage !== \"undefined\", () => \"Current browser does not appear to support localStorage\");\n this.LS = window.localStorage;\n }\n async listModels() {\n const out = {};\n const prefix = PATH_PREFIX + PATH_SEPARATOR;\n const suffix = PATH_SEPARATOR + INFO_SUFFIX;\n for (let i2 = 0; i2 < this.LS.length; ++i2) {\n const key = this.LS.key(i2);\n if (key.startsWith(prefix) && 
key.endsWith(suffix)) {\n const modelPath = getModelPathFromKey(key);\n out[modelPath] = JSON.parse(this.LS.getItem(key));\n }\n }\n return out;\n }\n async removeModel(path) {\n path = maybeStripScheme2(path);\n const keys = getModelKeys(path);\n if (this.LS.getItem(keys.info) == null) {\n throw new Error(`Cannot find model at path '${path}'`);\n }\n const info = JSON.parse(this.LS.getItem(keys.info));\n removeItems(keys);\n return info;\n }\n};\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/model_management.js\nvar URL_SCHEME_SUFFIX = \"://\";\nvar ModelStoreManagerRegistry = class {\n constructor() {\n this.managers = {};\n }\n static getInstance() {\n if (ModelStoreManagerRegistry.instance == null) {\n ModelStoreManagerRegistry.instance = new ModelStoreManagerRegistry();\n }\n return ModelStoreManagerRegistry.instance;\n }\n static registerManager(scheme, manager) {\n assert(scheme != null, () => \"scheme must not be undefined or null.\");\n if (scheme.endsWith(URL_SCHEME_SUFFIX)) {\n scheme = scheme.slice(0, scheme.indexOf(URL_SCHEME_SUFFIX));\n }\n assert(scheme.length > 0, () => \"scheme must not be an empty string.\");\n const registry = ModelStoreManagerRegistry.getInstance();\n assert(registry.managers[scheme] == null, () => `A model store manager is already registered for scheme '${scheme}'.`);\n registry.managers[scheme] = manager;\n }\n static getManager(scheme) {\n const manager = ModelStoreManagerRegistry.getInstance().managers[scheme];\n if (manager == null) {\n throw new Error(`Cannot find model manager for scheme '${scheme}'`);\n }\n return manager;\n }\n static getSchemes() {\n return Object.keys(ModelStoreManagerRegistry.getInstance().managers);\n }\n};\nfunction parseURL(url) {\n if (url.indexOf(URL_SCHEME_SUFFIX) === -1) {\n throw new Error(`The url string provided does not contain a scheme. 
Supported schemes are: ${ModelStoreManagerRegistry.getSchemes().join(\",\")}`);\n }\n return {\n scheme: url.split(URL_SCHEME_SUFFIX)[0],\n path: url.split(URL_SCHEME_SUFFIX)[1]\n };\n}\nasync function cloneModelInternal(sourceURL, destURL, deleteSource = false) {\n assert(sourceURL !== destURL, () => `Old path and new path are the same: '${sourceURL}'`);\n const loadHandlers = IORouterRegistry.getLoadHandlers(sourceURL);\n assert(loadHandlers.length > 0, () => `Copying failed because no load handler is found for source URL ${sourceURL}.`);\n assert(loadHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) load handlers for source URL ${sourceURL}.`);\n const loadHandler = loadHandlers[0];\n const saveHandlers = IORouterRegistry.getSaveHandlers(destURL);\n assert(saveHandlers.length > 0, () => `Copying failed because no save handler is found for destination URL ${destURL}.`);\n assert(saveHandlers.length < 2, () => `Copying failed because more than one (${loadHandlers.length}) save handlers for destination URL ${destURL}.`);\n const saveHandler = saveHandlers[0];\n const sourceScheme = parseURL(sourceURL).scheme;\n const sourcePath = parseURL(sourceURL).path;\n const sameMedium = sourceScheme === parseURL(sourceURL).scheme;\n const modelArtifacts = await loadHandler.load();\n if (deleteSource && sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme).removeModel(sourcePath);\n }\n const saveResult = await saveHandler.save(modelArtifacts);\n if (deleteSource && !sameMedium) {\n await ModelStoreManagerRegistry.getManager(sourceScheme).removeModel(sourcePath);\n }\n return saveResult.modelArtifactsInfo;\n}\nasync function listModels() {\n const schemes = ModelStoreManagerRegistry.getSchemes();\n const out = {};\n for (const scheme of schemes) {\n const schemeOut = await ModelStoreManagerRegistry.getManager(scheme).listModels();\n for (const path in schemeOut) {\n const url = scheme + URL_SCHEME_SUFFIX + path;\n out[url] = schemeOut[path];\n }\n }\n return out;\n}\nasync function removeModel(url) {\n const schemeAndPath = parseURL(url);\n const manager = ModelStoreManagerRegistry.getManager(schemeAndPath.scheme);\n return manager.removeModel(schemeAndPath.path);\n}\nasync function copyModel(sourceURL, destURL) {\n const deleteSource = false;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\nasync function moveModel(sourceURL, destURL) {\n const deleteSource = true;\n return cloneModelInternal(sourceURL, destURL, deleteSource);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/platforms/platform_browser.js\nvar PlatformBrowser = class {\n fetch(path, init2) {\n return fetch(path, init2);\n }\n now() {\n return performance.now();\n }\n encode(text, encoding) {\n if (encoding !== \"utf-8\" && encoding !== \"utf8\") {\n throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);\n }\n if (this.textEncoder == null) {\n this.textEncoder = new TextEncoder();\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n return new TextDecoder(encoding).decode(bytes);\n }\n};\nif (env().get(\"IS_BROWSER\")) {\n env().setPlatform(\"browser\", new PlatformBrowser());\n try {\n ModelStoreManagerRegistry.registerManager(BrowserLocalStorage.URL_SCHEME, new BrowserLocalStorageManager());\n } catch (err) {\n }\n try {\n ModelStoreManagerRegistry.registerManager(BrowserIndexedDB.URL_SCHEME, new BrowserIndexedDBManager());\n } catch (err) {\n }\n}\n\n// 
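// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: the model
// management helpers defined above, working across the localstorage:// and
// indexeddb:// schemes (browser only). Assumes a model was previously saved
// under the hypothetical source URL.
import * as tf from '@tensorflow/tfjs-core';

(async () => {
  console.log(await tf.io.listModels());                                        // { url: ModelArtifactsInfo }
  await tf.io.copyModel('localstorage://demo-model', 'indexeddb://demo-model'); // load + save, source kept
  await tf.io.moveModel('indexeddb://demo-model', 'localstorage://demo-copy');  // cloneModelInternal(deleteSource = true)
  await tf.io.removeModel('localstorage://demo-copy');
})();
// ---------------------------------------------------------------------------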
node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/platforms/platform_node.js\nvar getNodeFetch = {\n importFetch: () => require_browser()\n};\nvar systemFetch;\nvar PlatformNode = class {\n constructor() {\n this.util = require_util();\n this.textEncoder = new this.util.TextEncoder();\n }\n fetch(path, requestInits) {\n if (env().global.fetch != null) {\n return env().global.fetch(path, requestInits);\n }\n if (systemFetch == null) {\n systemFetch = getNodeFetch.importFetch();\n }\n return systemFetch(path, requestInits);\n }\n now() {\n const time2 = process.hrtime();\n return time2[0] * 1e3 + time2[1] / 1e6;\n }\n encode(text, encoding) {\n if (encoding !== \"utf-8\" && encoding !== \"utf8\") {\n throw new Error(`Node built-in encoder only supports utf-8, but got ${encoding}`);\n }\n return this.textEncoder.encode(text);\n }\n decode(bytes, encoding) {\n if (bytes.length === 0) {\n return \"\";\n }\n return new this.util.TextDecoder(encoding).decode(bytes);\n }\n};\nif (env().get(\"IS_NODE\") && !env().get(\"IS_BROWSER\")) {\n env().setPlatform(\"node\", new PlatformNode());\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/buffer.js\nfunction buffer(shape, dtype = \"float32\", values) {\n dtype = dtype || \"float32\";\n assertNonNegativeIntegerDimensions(shape);\n return new TensorBuffer(shape, dtype, values);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/cast.js\nfunction cast_(x, dtype) {\n const $x = convertToTensor(x, \"x\", \"cast\");\n if (!isValidDtype(dtype)) {\n throw new Error(`Failed to cast to unknown dtype ${dtype}`);\n }\n if (dtype === \"string\" && $x.dtype !== \"string\" || dtype !== \"string\" && $x.dtype === \"string\") {\n throw new Error(\"Only strings can be casted to strings\");\n }\n const inputs = { x: $x };\n const attrs = { dtype };\n return ENGINE.runKernel(Cast, inputs, attrs);\n}\nvar cast = op({ cast_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/clone.js\nfunction clone_(x) {\n const $x = convertToTensor(x, \"x\", \"clone\", \"string_or_numeric\");\n const inputs = { x: $x };\n return ENGINE.runKernel(Identity, inputs);\n}\nvar clone = op({ clone_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/print.js\nfunction print(x, verbose = false) {\n console.log(x.toString(verbose));\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/base_side_effects.js\ngetOrMakeEngine();\nvar opHandler2 = {\n buffer,\n cast,\n clone,\n print\n};\nsetOpHandler(opHandler2);\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/io.js\nvar io_exports = {};\n__export(io_exports, {\n browserFiles: () => browserFiles,\n browserHTTPRequest: () => browserHTTPRequest,\n concatenateArrayBuffers: () => concatenateArrayBuffers,\n copyModel: () => copyModel,\n decodeWeights: () => decodeWeights,\n encodeWeights: () => encodeWeights,\n fromMemory: () => fromMemory,\n fromMemorySync: () => fromMemorySync,\n getLoadHandlers: () => getLoadHandlers,\n getModelArtifactsForJSON: () => getModelArtifactsForJSON,\n getModelArtifactsInfoForJSON: () => getModelArtifactsInfoForJSON,\n getSaveHandlers: () => getSaveHandlers,\n http: () => http,\n isHTTPScheme: () => isHTTPScheme,\n listModels: () => listModels,\n loadWeights: () => loadWeights,\n moveModel: () => 
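// ---------------------------------------------------------------------------
// Editor's illustrative sketch, not part of the generated bundle: the buffer/cast/
// clone/print ops wired into opHandler2 above. Assumes tfjs-core@3.19.0 with a CPU backend.
import * as tf from '@tensorflow/tfjs-core';
import '@tensorflow/tfjs-backend-cpu';

const buf = tf.buffer([2, 2], 'float32'); // mutable host-side TensorBuffer
buf.set(9, 0, 1);                         // value 9 at row 0, column 1
const t = buf.toTensor();
tf.print(tf.cast(t, 'int32'));            // cast_() validates the target dtype first
console.log(tf.clone(t).shape);           // clone_() runs the Identity kernel
// ---------------------------------------------------------------------------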
moveModel,\n registerLoadRouter: () => registerLoadRouter,\n registerSaveRouter: () => registerSaveRouter,\n removeModel: () => removeModel,\n weightsLoaderFactory: () => weightsLoaderFactory,\n withSaveHandler: () => withSaveHandler,\n withSaveHandlerSync: () => withSaveHandlerSync\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/browser_files.js\nvar DEFAULT_FILE_NAME_PREFIX = \"model\";\nvar DEFAULT_JSON_EXTENSION_NAME = \".json\";\nvar DEFAULT_WEIGHT_DATA_EXTENSION_NAME = \".weights.bin\";\nfunction defer(f) {\n return new Promise((resolve) => setTimeout(resolve)).then(f);\n}\nvar BrowserDownloads = class {\n constructor(fileNamePrefix) {\n if (!env().getBool(\"IS_BROWSER\")) {\n throw new Error(\"browserDownloads() cannot proceed because the current environment is not a browser.\");\n }\n if (fileNamePrefix.startsWith(BrowserDownloads.URL_SCHEME)) {\n fileNamePrefix = fileNamePrefix.slice(BrowserDownloads.URL_SCHEME.length);\n }\n if (fileNamePrefix == null || fileNamePrefix.length === 0) {\n fileNamePrefix = DEFAULT_FILE_NAME_PREFIX;\n }\n this.modelJsonFileName = fileNamePrefix + DEFAULT_JSON_EXTENSION_NAME;\n this.weightDataFileName = fileNamePrefix + DEFAULT_WEIGHT_DATA_EXTENSION_NAME;\n }\n async save(modelArtifacts) {\n if (typeof document === \"undefined\") {\n throw new Error(\"Browser downloads are not supported in this environment since `document` is not present\");\n }\n const weightsURL = window.URL.createObjectURL(new Blob([modelArtifacts.weightData], { type: \"application/octet-stream\" }));\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\"BrowserDownloads.save() does not support saving model topology in binary formats yet.\");\n } else {\n const weightsManifest = [{\n paths: [\"./\" + this.weightDataFileName],\n weights: modelArtifacts.weightSpecs\n }];\n const modelJSON = getModelJSONForModelArtifacts(modelArtifacts, weightsManifest);\n const modelJsonURL = window.URL.createObjectURL(new Blob([JSON.stringify(modelJSON)], { type: \"application/json\" }));\n const jsonAnchor = this.modelJsonAnchor == null ? document.createElement(\"a\") : this.modelJsonAnchor;\n jsonAnchor.download = this.modelJsonFileName;\n jsonAnchor.href = modelJsonURL;\n await defer(() => jsonAnchor.dispatchEvent(new MouseEvent(\"click\")));\n if (modelArtifacts.weightData != null) {\n const weightDataAnchor = this.weightDataAnchor == null ? 
document.createElement(\"a\") : this.weightDataAnchor;\n weightDataAnchor.download = this.weightDataFileName;\n weightDataAnchor.href = weightsURL;\n await defer(() => weightDataAnchor.dispatchEvent(new MouseEvent(\"click\")));\n }\n return { modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts) };\n }\n }\n};\nBrowserDownloads.URL_SCHEME = \"downloads://\";\nvar BrowserFiles = class {\n constructor(files) {\n if (files == null || files.length < 1) {\n throw new Error(`When calling browserFiles, at least 1 file is required, but received ${files}`);\n }\n this.jsonFile = files[0];\n this.weightsFiles = files.slice(1);\n }\n async load() {\n return new Promise((resolve, reject) => {\n const jsonReader = new FileReader();\n jsonReader.onload = (event) => {\n const modelJSON = JSON.parse(event.target.result);\n const modelTopology = modelJSON.modelTopology;\n if (modelTopology == null) {\n reject(new Error(`modelTopology field is missing from file ${this.jsonFile.name}`));\n return;\n }\n const weightsManifest = modelJSON.weightsManifest;\n if (weightsManifest == null) {\n reject(new Error(`weightManifest field is missing from file ${this.jsonFile.name}`));\n return;\n }\n if (this.weightsFiles.length === 0) {\n resolve({ modelTopology });\n return;\n }\n const modelArtifactsPromise = getModelArtifactsForJSON(modelJSON, (weightsManifest2) => this.loadWeights(weightsManifest2));\n resolve(modelArtifactsPromise);\n };\n jsonReader.onerror = (error) => reject(`Failed to read model topology and weights manifest JSON from file '${this.jsonFile.name}'. BrowserFiles supports loading Keras-style tf.Model artifacts only.`);\n jsonReader.readAsText(this.jsonFile);\n });\n }\n loadWeights(weightsManifest) {\n const weightSpecs = [];\n const paths = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n paths.push(...entry.paths);\n }\n const pathToFile = this.checkManifestAndWeightFiles(weightsManifest);\n const promises = paths.map((path) => this.loadWeightsFile(path, pathToFile[path]));\n return Promise.all(promises).then((buffers) => [weightSpecs, concatenateArrayBuffers(buffers)]);\n }\n loadWeightsFile(path, file) {\n return new Promise((resolve, reject) => {\n const weightFileReader = new FileReader();\n weightFileReader.onload = (event) => {\n const weightData = event.target.result;\n resolve(weightData);\n };\n weightFileReader.onerror = (error) => reject(`Failed to weights data from file of path '${path}'.`);\n weightFileReader.readAsArrayBuffer(file);\n });\n }\n checkManifestAndWeightFiles(manifest) {\n const basenames = [];\n const fileNames = this.weightsFiles.map((file) => basename(file.name));\n const pathToFile = {};\n for (const group of manifest) {\n group.paths.forEach((path) => {\n const pathBasename = basename(path);\n if (basenames.indexOf(pathBasename) !== -1) {\n throw new Error(`Duplicate file basename found in weights manifest: '${pathBasename}'`);\n }\n basenames.push(pathBasename);\n if (fileNames.indexOf(pathBasename) === -1) {\n throw new Error(`Weight file with basename '${pathBasename}' is not provided.`);\n } else {\n pathToFile[path] = this.weightsFiles[fileNames.indexOf(pathBasename)];\n }\n });\n }\n if (basenames.length !== this.weightsFiles.length) {\n throw new Error(`Mismatch in the number of files in weights manifest (${basenames.length}) and the number of weight files provided (${this.weightsFiles.length}).`);\n }\n return pathToFile;\n }\n};\nvar browserDownloadsRouter = (url) => {\n if (!env().getBool(\"IS_BROWSER\")) 
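// Editor's sketch (illustrative only, not part of the generated bundle):
// BrowserDownloads/BrowserFiles above are normally reached through the
// model-level API rather than instantiated directly. Assumes a browser page,
// an existing tf.LayersModel `model`, an <input type="file" multiple> element
// `fileInput`, and the full @tensorflow/tfjs 3.19.0 bundle as `tf`; the names
// are hypothetical.
async function saveAndReload(model, fileInput) {
  await model.save('downloads://my-model');              // triggers my-model.json + my-model.weights.bin downloads
  const files = Array.from(fileInput.files);             // model.json must come first, weight shards after it
  return tf.loadLayersModel(tf.io.browserFiles(files));
}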
{\n return null;\n } else {\n if (!Array.isArray(url) && url.startsWith(BrowserDownloads.URL_SCHEME)) {\n return browserDownloads(url.slice(BrowserDownloads.URL_SCHEME.length));\n } else {\n return null;\n }\n }\n};\nIORouterRegistry.registerSaveRouter(browserDownloadsRouter);\nfunction browserDownloads(fileNamePrefix = \"model\") {\n return new BrowserDownloads(fileNamePrefix);\n}\nfunction browserFiles(files) {\n return new BrowserFiles(files);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/progress.js\nfunction monitorPromisesProgress(promises, onProgress, startFraction, endFraction) {\n checkPromises(promises);\n startFraction = startFraction == null ? 0 : startFraction;\n endFraction = endFraction == null ? 1 : endFraction;\n checkFraction(startFraction, endFraction);\n let resolvedPromise = 0;\n const registerMonitor = (promise) => {\n promise.then((value) => {\n const fraction = startFraction + ++resolvedPromise / promises.length * (endFraction - startFraction);\n onProgress(fraction);\n return value;\n });\n return promise;\n };\n function checkPromises(promises2) {\n assert(promises2 != null && Array.isArray(promises2) && promises2.length > 0, () => \"promises must be a none empty array\");\n }\n function checkFraction(startFraction2, endFraction2) {\n assert(startFraction2 >= 0 && startFraction2 <= 1, () => `Progress fraction must be in range [0, 1], but got startFraction ${startFraction2}`);\n assert(endFraction2 >= 0 && endFraction2 <= 1, () => `Progress fraction must be in range [0, 1], but got endFraction ${endFraction2}`);\n assert(endFraction2 >= startFraction2, () => `startFraction must be no more than endFraction, but got startFraction ${startFraction2} and endFraction ${endFraction2}`);\n }\n return Promise.all(promises.map(registerMonitor));\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/weights_loader.js\nasync function loadWeightsAsArrayBuffer(fetchURLs, loadOptions) {\n if (loadOptions == null) {\n loadOptions = {};\n }\n const fetchFunc = loadOptions.fetchFunc == null ? env().platform.fetch : loadOptions.fetchFunc;\n const requests = fetchURLs.map((fetchURL) => fetchFunc(fetchURL, loadOptions.requestInit, { isBinary: true }));\n const fetchStartFraction = 0;\n const fetchEndFraction = 0.5;\n const responses = loadOptions.onProgress == null ? await Promise.all(requests) : await monitorPromisesProgress(requests, loadOptions.onProgress, fetchStartFraction, fetchEndFraction);\n const bufferPromises = responses.map((response) => response.arrayBuffer());\n const bufferStartFraction = 0.5;\n const bufferEndFraction = 1;\n const buffers = loadOptions.onProgress == null ? await Promise.all(bufferPromises) : await monitorPromisesProgress(bufferPromises, loadOptions.onProgress, bufferStartFraction, bufferEndFraction);\n return buffers;\n}\nasync function loadWeights(manifest, filePathPrefix = \"\", weightNames, requestInit) {\n const fetchWeights = (fetchUrls) => loadWeightsAsArrayBuffer(fetchUrls, { requestInit });\n const loadWeights2 = weightsLoaderFactory(fetchWeights);\n return loadWeights2(manifest, filePathPrefix, weightNames);\n}\nfunction weightsLoaderFactory(fetchWeightsFunction) {\n return async (manifest, filePathPrefix = \"\", weightNames) => {\n const groupIndicesToFetchMap = manifest.map(() => false);\n const groupWeightsToFetch = {};\n const weightsFound = weightNames != null ? 
weightNames.map(() => false) : [];\n const allManifestWeightNames = [];\n manifest.forEach((manifestGroupConfig, groupIndex) => {\n let groupOffset = 0;\n manifestGroupConfig.weights.forEach((weightsEntry) => {\n const rawDtype = \"quantization\" in weightsEntry ? weightsEntry.quantization.dtype : weightsEntry.dtype;\n const weightsBytes = DTYPE_VALUE_SIZE_MAP[rawDtype] * sizeFromShape(weightsEntry.shape);\n const enqueueWeightsForFetchingFn = () => {\n groupIndicesToFetchMap[groupIndex] = true;\n if (groupWeightsToFetch[groupIndex] == null) {\n groupWeightsToFetch[groupIndex] = [];\n }\n groupWeightsToFetch[groupIndex].push({\n manifestEntry: weightsEntry,\n groupOffset,\n sizeBytes: weightsBytes\n });\n };\n if (weightNames != null) {\n weightNames.forEach((weightName, weightIndex) => {\n if (weightName === weightsEntry.name) {\n enqueueWeightsForFetchingFn();\n weightsFound[weightIndex] = true;\n }\n });\n } else {\n enqueueWeightsForFetchingFn();\n }\n allManifestWeightNames.push(weightsEntry.name);\n groupOffset += weightsBytes;\n });\n });\n if (!weightsFound.every((found) => found)) {\n const weightsNotFound = weightNames.filter((_, i2) => !weightsFound[i2]);\n throw new Error(`Could not find weights in manifest with names: ${weightsNotFound.join(\", \")}. \nManifest JSON has weights with names: ${allManifestWeightNames.join(\", \")}.`);\n }\n const groupIndicesToFetch = groupIndicesToFetchMap.reduce((accumulator, shouldFetch, i2) => {\n if (shouldFetch) {\n accumulator.push(i2);\n }\n return accumulator;\n }, []);\n const fetchUrls = [];\n groupIndicesToFetch.forEach((i2) => {\n manifest[i2].paths.forEach((filepath) => {\n const fetchUrl = filePathPrefix + (!filePathPrefix.endsWith(\"/\") ? \"/\" : \"\") + filepath;\n fetchUrls.push(fetchUrl);\n });\n });\n const buffers = await fetchWeightsFunction(fetchUrls);\n const weightsTensorMap = {};\n let bufferIndexOffset = 0;\n groupIndicesToFetch.forEach((i2) => {\n const numBuffers = manifest[i2].paths.length;\n let groupBytes = 0;\n for (let i3 = 0; i3 < numBuffers; i3++) {\n groupBytes += buffers[bufferIndexOffset + i3].byteLength;\n }\n const groupBuffer = new ArrayBuffer(groupBytes);\n const groupByteBuffer = new Uint8Array(groupBuffer);\n let groupBufferOffset = 0;\n for (let i3 = 0; i3 < numBuffers; i3++) {\n const buffer2 = new Uint8Array(buffers[bufferIndexOffset + i3]);\n groupByteBuffer.set(buffer2, groupBufferOffset);\n groupBufferOffset += buffer2.byteLength;\n }\n const weightsEntries = groupWeightsToFetch[i2];\n weightsEntries.forEach((weightsEntry) => {\n const byteBuffer = groupBuffer.slice(weightsEntry.groupOffset, weightsEntry.groupOffset + weightsEntry.sizeBytes);\n const nameToTensorMap = decodeWeights(byteBuffer, [weightsEntry.manifestEntry]);\n for (const name in nameToTensorMap) {\n weightsTensorMap[name] = nameToTensorMap[name];\n }\n });\n bufferIndexOffset += numBuffers;\n });\n return weightsTensorMap;\n };\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/http.js\nvar OCTET_STREAM_MIME_TYPE = \"application/octet-stream\";\nvar JSON_TYPE = \"application/json\";\nvar HTTPRequest = class {\n constructor(path, loadOptions) {\n this.DEFAULT_METHOD = \"POST\";\n if (loadOptions == null) {\n loadOptions = {};\n }\n this.weightPathPrefix = loadOptions.weightPathPrefix;\n this.onProgress = loadOptions.onProgress;\n this.weightUrlConverter = loadOptions.weightUrlConverter;\n if (loadOptions.fetchFunc != null) {\n assert(typeof loadOptions.fetchFunc === \"function\", () 
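// Editor's sketch (illustrative only, not part of the generated bundle):
// loadWeights()/weightsLoaderFactory() above resolve a weights manifest into a
// name -> tensor map, fetching only the shard groups that contain the requested
// names. The manifest and base URL below are hypothetical; assumes
// @tensorflow/tfjs-core 3.19.0 as `tf`.
const manifest = [{
  paths: ['group1-shard1of1.bin'],
  weights: [{ name: 'dense/kernel', shape: [4, 2], dtype: 'float32' }],
}];
tf.io.loadWeights(manifest, 'https://example.com/model', ['dense/kernel'])
  .then((weightMap) => weightMap['dense/kernel'].print());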
=> \"Must pass a function that matches the signature of `fetch` (see https://developer.mozilla.org/en-US/docs/Web/API/Fetch_API)\");\n this.fetch = loadOptions.fetchFunc;\n } else {\n this.fetch = env().platform.fetch;\n }\n assert(path != null && path.length > 0, () => \"URL path for http must not be null, undefined or empty.\");\n if (Array.isArray(path)) {\n assert(path.length === 2, () => `URL paths for http must have a length of 2, (actual length is ${path.length}).`);\n }\n this.path = path;\n if (loadOptions.requestInit != null && loadOptions.requestInit.body != null) {\n throw new Error(\"requestInit is expected to have no pre-existing body, but has one.\");\n }\n this.requestInit = loadOptions.requestInit || {};\n }\n async save(modelArtifacts) {\n if (modelArtifacts.modelTopology instanceof ArrayBuffer) {\n throw new Error(\"BrowserHTTPRequest.save() does not support saving model topology in binary formats yet.\");\n }\n const init2 = Object.assign({ method: this.DEFAULT_METHOD }, this.requestInit);\n init2.body = new FormData();\n const weightsManifest = [{\n paths: [\"./model.weights.bin\"],\n weights: modelArtifacts.weightSpecs\n }];\n const modelTopologyAndWeightManifest = getModelJSONForModelArtifacts(modelArtifacts, weightsManifest);\n init2.body.append(\"model.json\", new Blob([JSON.stringify(modelTopologyAndWeightManifest)], { type: JSON_TYPE }), \"model.json\");\n if (modelArtifacts.weightData != null) {\n init2.body.append(\"model.weights.bin\", new Blob([modelArtifacts.weightData], { type: OCTET_STREAM_MIME_TYPE }), \"model.weights.bin\");\n }\n const response = await this.fetch(this.path, init2);\n if (response.ok) {\n return {\n modelArtifactsInfo: getModelArtifactsInfoForJSON(modelArtifacts),\n responses: [response]\n };\n } else {\n throw new Error(`BrowserHTTPRequest.save() failed due to HTTP response status ${response.status}.`);\n }\n }\n async load() {\n const modelConfigRequest = await this.fetch(this.path, this.requestInit);\n if (!modelConfigRequest.ok) {\n throw new Error(`Request to ${this.path} failed with status code ${modelConfigRequest.status}. Please verify this URL points to the model JSON of the model to load.`);\n }\n let modelJSON;\n try {\n modelJSON = await modelConfigRequest.json();\n } catch (e2) {\n let message = `Failed to parse model JSON of response from ${this.path}.`;\n if (this.path.endsWith(\".pb\")) {\n message += \" Your path contains a .pb file extension. Support for .pb models have been removed in TensorFlow.js 1.0 in favor of .json models. You can re-convert your Python TensorFlow model using the TensorFlow.js 1.0 conversion scripts or you can convert your.pb models with the 'pb2json'NPM script in the tensorflow/tfjs-converter repository.\";\n } else {\n message += \" Please make sure the server is serving valid JSON for this request.\";\n }\n throw new Error(message);\n }\n const modelTopology = modelJSON.modelTopology;\n const weightsManifest = modelJSON.weightsManifest;\n if (modelTopology == null && weightsManifest == null) {\n throw new Error(`The JSON from HTTP path ${this.path} contains neither model topology or manifest for weights.`);\n }\n return getModelArtifactsForJSON(modelJSON, (weightsManifest2) => this.loadWeights(weightsManifest2));\n }\n async loadWeights(weightsManifest) {\n const weightPath = Array.isArray(this.path) ? 
this.path[1] : this.path;\n const [prefix, suffix] = parseUrl(weightPath);\n const pathPrefix = this.weightPathPrefix || prefix;\n const weightSpecs = [];\n for (const entry of weightsManifest) {\n weightSpecs.push(...entry.weights);\n }\n const fetchURLs = [];\n const urlPromises = [];\n for (const weightsGroup of weightsManifest) {\n for (const path of weightsGroup.paths) {\n if (this.weightUrlConverter != null) {\n urlPromises.push(this.weightUrlConverter(path));\n } else {\n fetchURLs.push(pathPrefix + path + suffix);\n }\n }\n }\n if (this.weightUrlConverter) {\n fetchURLs.push(...await Promise.all(urlPromises));\n }\n const buffers = await loadWeightsAsArrayBuffer(fetchURLs, {\n requestInit: this.requestInit,\n fetchFunc: this.fetch,\n onProgress: this.onProgress\n });\n return [weightSpecs, concatenateArrayBuffers(buffers)];\n }\n};\nHTTPRequest.URL_SCHEME_REGEX = /^https?:\\/\\//;\nfunction parseUrl(url) {\n const lastSlash = url.lastIndexOf(\"/\");\n const lastSearchParam = url.lastIndexOf(\"?\");\n const prefix = url.substring(0, lastSlash);\n const suffix = lastSearchParam > lastSlash ? url.substring(lastSearchParam) : \"\";\n return [prefix + \"/\", suffix];\n}\nfunction isHTTPScheme(url) {\n return url.match(HTTPRequest.URL_SCHEME_REGEX) != null;\n}\nvar httpRouter = (url, loadOptions) => {\n if (typeof fetch === \"undefined\" && (loadOptions == null || loadOptions.fetchFunc == null)) {\n return null;\n } else {\n let isHTTP = true;\n if (Array.isArray(url)) {\n isHTTP = url.every((urlItem) => isHTTPScheme(urlItem));\n } else {\n isHTTP = isHTTPScheme(url);\n }\n if (isHTTP) {\n return http(url, loadOptions);\n }\n }\n return null;\n};\nIORouterRegistry.registerSaveRouter(httpRouter);\nIORouterRegistry.registerLoadRouter(httpRouter);\nfunction http(path, loadOptions) {\n return new HTTPRequest(path, loadOptions);\n}\nfunction browserHTTPRequest(path, loadOptions) {\n return http(path, loadOptions);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/io/passthrough.js\nvar PassthroughLoader = class {\n constructor(modelArtifacts) {\n this.modelArtifacts = modelArtifacts;\n }\n load() {\n return this.modelArtifacts;\n }\n};\nvar PassthroughSaver = class {\n constructor(saveHandler) {\n this.saveHandler = saveHandler;\n }\n save(modelArtifacts) {\n return this.saveHandler(modelArtifacts);\n }\n};\nvar PassthroughAsync = class {\n constructor(handler) {\n if (handler.load) {\n this.load = () => Promise.resolve(handler.load());\n }\n if (handler.save) {\n this.save = (modelArtifacts) => Promise.resolve(handler.save(modelArtifacts));\n }\n }\n};\nfunction fromMemory(modelArtifacts, weightSpecs, weightData, trainingConfig) {\n const args = arguments;\n return new PassthroughAsync(fromMemorySync(...args));\n}\nfunction fromMemorySync(modelArtifacts, weightSpecs, weightData, trainingConfig) {\n if (arguments.length === 1) {\n const isModelArtifacts = modelArtifacts.modelTopology != null || modelArtifacts.weightSpecs != null;\n if (isModelArtifacts) {\n return new PassthroughLoader(modelArtifacts);\n } else {\n console.warn(\"Please call tf.io.fromMemory() with only one argument. The argument should be of type ModelArtifacts. The multi-argument signature of tf.io.fromMemory() has been deprecated and will be removed in a future release.\");\n return new PassthroughLoader({ modelTopology: modelArtifacts });\n }\n } else {\n console.warn(\"Please call tf.io.fromMemory() with only one argument. The argument should be of type ModelArtifacts. 
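// Editor's sketch (illustrative only, not part of the generated bundle):
// HTTPRequest above backs tf.io.http(); requestInit is forwarded to fetch()
// and onProgress reports the 0..1 download fraction. The URL is hypothetical;
// assumes the full @tensorflow/tfjs 3.19.0 bundle as `tf` (loadGraphModel is
// not part of tfjs-core itself).
const handler = tf.io.http('https://example.com/model.json', {
  requestInit: { credentials: 'include' },
  onProgress: (fraction) => console.log('download', Math.round(100 * fraction), '%'),
});
tf.loadGraphModel(handler).then((model) => console.log('graph model loaded', model.inputs));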
The multi-argument signature of tf.io.fromMemory() has been deprecated and will be removed in a future release.\");\n return new PassthroughLoader({\n modelTopology: modelArtifacts,\n weightSpecs,\n weightData,\n trainingConfig\n });\n }\n}\nfunction withSaveHandler(saveHandler) {\n return new PassthroughSaver(saveHandler);\n}\nfunction withSaveHandlerSync(saveHandler) {\n return new PassthroughSaver(saveHandler);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/math.js\nvar math_exports = {};\n__export(math_exports, {\n confusionMatrix: () => confusionMatrix\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/mat_mul.js\nfunction matMul_(a, b, transposeA = false, transposeB = false) {\n let $a = convertToTensor(a, \"a\", \"matMul\");\n let $b = convertToTensor(b, \"b\", \"matMul\");\n [$a, $b] = makeTypesMatch($a, $b);\n const inputs = { a: $a, b: $b };\n const attrs = { transposeA, transposeB };\n return ENGINE.runKernel(BatchMatMul, inputs, attrs);\n}\nvar matMul = op({ matMul_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/one_hot.js\nfunction oneHot_(indices, depth, onValue = 1, offValue = 0) {\n if (depth < 2) {\n throw new Error(`Error in oneHot: depth must be >=2, but it is ${depth}`);\n }\n const $indices = convertToTensor(indices, \"indices\", \"oneHot\", \"int32\");\n const inputs = { indices: $indices };\n const attrs = { depth, onValue, offValue };\n return ENGINE.runKernel(OneHot, inputs, attrs);\n}\nvar oneHot = op({ oneHot_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/globals.js\nfunction enableProdMode() {\n env().set(\"PROD\", true);\n}\nfunction enableDebugMode() {\n env().set(\"DEBUG\", true);\n}\nfunction disableDeprecationWarnings() {\n env().set(\"DEPRECATION_WARNINGS_ENABLED\", false);\n console.warn(`TensorFlow.js deprecation warnings have been disabled.`);\n}\nfunction deprecationWarn(msg) {\n if (env().getBool(\"DEPRECATION_WARNINGS_ENABLED\")) {\n console.warn(msg + \" You can disable deprecation warnings with tf.disableDeprecationWarnings().\");\n }\n}\nsetDeprecationWarningFn(deprecationWarn);\nfunction disposeVariables() {\n ENGINE.disposeVariables();\n}\nfunction engine() {\n return ENGINE;\n}\nfunction memory() {\n return ENGINE.memory();\n}\nfunction profile(f) {\n return ENGINE.profile(f);\n}\nfunction tidy(nameOrFn, fn) {\n return ENGINE.tidy(nameOrFn, fn);\n}\nfunction dispose(container) {\n const tensors = getTensorsInContainer(container);\n tensors.forEach((tensor2) => tensor2.dispose());\n}\nfunction keep(result) {\n return ENGINE.keep(result);\n}\nfunction time(f) {\n return ENGINE.time(f);\n}\nfunction setBackend(backendName) {\n return ENGINE.setBackend(backendName);\n}\nfunction ready() {\n return ENGINE.ready();\n}\nfunction getBackend() {\n return ENGINE.backendName;\n}\nfunction removeBackend(name) {\n ENGINE.removeBackend(name);\n}\nfunction findBackend(name) {\n return ENGINE.findBackend(name);\n}\nfunction findBackendFactory(name) {\n return ENGINE.findBackendFactory(name);\n}\nfunction registerBackend(name, factory, priority = 1) {\n return ENGINE.registerBackend(name, factory, priority);\n}\nfunction backend() {\n return ENGINE.backend;\n}\nfunction setPlatform(platformName, platform) {\n env().setPlatform(platformName, platform);\n}\n\n// 
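// Editor's sketch (illustrative only, not part of the generated bundle):
// the globals above expose engine-level helpers; tidy() is the usual way to
// keep intermediate tensors from leaking. Assumes the full @tensorflow/tfjs
// 3.19.0 bundle as `tf` (which registers the cpu/webgl backends) and an async
// or ES-module context for the awaits.
await tf.setBackend('cpu');                  // select a registered backend
await tf.ready();                            // wait for backend initialization
const gram = tf.tidy(() => {
  const rows = tf.cast(tf.oneHot(tf.tensor1d([0, 2, 1], 'int32'), 3), 'float32');
  return tf.matMul(rows, rows, false, true); // only the returned tensor survives tidy()
});
console.log(tf.memory().numTensors);         // intermediates created inside tidy() are disposed
gram.dispose();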
node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/imag.js\nfunction imag_(input2) {\n const $input = convertToTensor(input2, \"input\", \"imag\");\n const inputs = { input: $input };\n return ENGINE.runKernel(Imag, inputs);\n}\nvar imag = op({ imag_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/neg.js\nfunction neg_(x) {\n const $x = convertToTensor(x, \"x\", \"neg\");\n const inputs = { x: $x };\n return ENGINE.runKernel(Neg, inputs);\n}\nvar neg = op({ neg_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/real.js\nfunction real_(input2) {\n const $input = convertToTensor(input2, \"input\", \"real\");\n const inputs = { input: $input };\n return ENGINE.runKernel(Real, inputs);\n}\nvar real = op({ real_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/transpose.js\nfunction transpose_(x, perm, conjugate) {\n const $x = convertToTensor(x, \"x\", \"transpose\");\n if (perm == null) {\n perm = $x.shape.map((s2, i2) => i2).reverse();\n }\n assert($x.rank === perm.length, () => `Error in transpose: rank of input ${$x.rank} must match length of perm ${perm}.`);\n perm.forEach((axis) => {\n assert(axis >= 0 && axis < $x.rank, () => `All entries in 'perm' must be between 0 and ${$x.rank - 1} but got ${perm}`);\n });\n if ($x.rank <= 1) {\n return $x.clone();\n }\n const inputs = { x: $x };\n const attrs = { perm };\n if ($x.dtype === \"complex64\") {\n return tidy(() => {\n let $real = real($x);\n let $imag = imag($x);\n $real = ENGINE.runKernel(Transpose, { x: $real }, attrs);\n $imag = ENGINE.runKernel(Transpose, { x: $imag }, attrs);\n if (conjugate) {\n $imag = neg($imag);\n }\n return complex($real, $imag);\n });\n }\n return ENGINE.runKernel(Transpose, inputs, attrs);\n}\nvar transpose = op({ transpose_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/confusion_matrix.js\nfunction confusionMatrix_(labels, predictions, numClasses) {\n const $labels = convertToTensor(labels, \"labels\", \"confusionMatrix\");\n const $predictions = convertToTensor(predictions, \"predictions\", \"confusionMatrix\");\n assert(numClasses == null || numClasses > 0 && Number.isInteger(numClasses), () => `If provided, numClasses must be a positive integer, but got ${numClasses}`);\n assert($labels.rank === 1, () => `Expected the rank of labels to be 1, but got ${$labels.rank}`);\n assert($predictions.rank === 1, () => `Expected the rank of predictions to be 1, but got ${$predictions.rank}`);\n assert($labels.shape[0] === $predictions.shape[0], () => `Mismatch in the number of examples: ${$labels.shape[0]} vs. ${$predictions.shape[0]}. 
Labels and predictions should have the same number of elements.`);\n assert(numClasses > 0 && Number.isInteger(numClasses), () => `numClasses is required to be a positive integer, but got ${numClasses}`);\n const oneHotLabels = oneHot(cast($labels, \"int32\"), numClasses);\n const oneHotPredictions = oneHot(cast($predictions, \"int32\"), numClasses);\n const oneHotLabelsT = transpose(oneHotLabels);\n const product = matMul(oneHotLabelsT, oneHotPredictions);\n return cast(product, \"int32\");\n}\nvar confusionMatrix = op({ confusionMatrix_ });\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/broadcast_util.js\nvar broadcast_util_exports = {};\n__export(broadcast_util_exports, {\n assertAndGetBroadcastShape: () => assertAndGetBroadcastShape,\n getBroadcastDims: () => getBroadcastDims,\n getReductionAxes: () => getReductionAxes\n});\nfunction getBroadcastDims(inShape, outShape) {\n const inRank = inShape.length;\n const dims = [];\n for (let i2 = 0; i2 < inRank; i2++) {\n const dim = inRank - 1 - i2;\n const a = inShape[dim] || 1;\n const b = outShape[outShape.length - 1 - i2] || 1;\n if (b > 1 && a === 1) {\n dims.unshift(dim);\n }\n }\n return dims;\n}\nfunction getReductionAxes(inShape, outShape) {\n const result = [];\n for (let i2 = 0; i2 < outShape.length; i2++) {\n const inDim = inShape[inShape.length - i2 - 1];\n const outAxis = outShape.length - i2 - 1;\n const outDim = outShape[outAxis];\n if (inDim == null || inDim === 1 && outDim > 1) {\n result.unshift(outAxis);\n }\n }\n return result;\n}\nfunction assertAndGetBroadcastShape(shapeA, shapeB) {\n const result = [];\n const l3 = Math.max(shapeA.length, shapeB.length);\n for (let i2 = 0; i2 < l3; i2++) {\n let a = shapeA[shapeA.length - i2 - 1];\n if (a == null) {\n a = 1;\n }\n let b = shapeB[shapeB.length - i2 - 1];\n if (b == null) {\n b = 1;\n }\n if (a === 1) {\n result.unshift(b);\n } else if (b === 1) {\n result.unshift(a);\n } else if (a !== b) {\n const errMsg = `Operands could not be broadcast together with shapes ${shapeA} and ${shapeB}.`;\n throw Error(errMsg);\n } else {\n result.unshift(a);\n }\n }\n return result;\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/browser.js\nvar browser_exports = {};\n__export(browser_exports, {\n fromPixels: () => fromPixels,\n fromPixelsAsync: () => fromPixelsAsync,\n toPixels: () => toPixels\n});\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/tensor3d.js\nfunction tensor3d(values, shape, dtype) {\n assertNonNull(values);\n if (shape != null && shape.length !== 3) {\n throw new Error(\"tensor3d() requires shape to have three numbers\");\n }\n const inferredShape = inferShape(values, dtype);\n if (inferredShape.length !== 3 && inferredShape.length !== 1) {\n throw new Error(\"tensor3d() requires values to be number[][][] or flat/TypedArray\");\n }\n if (inferredShape.length === 1 && shape == null) {\n throw new Error(\"tensor3d() requires shape to be provided when `values` are a flat array\");\n }\n return makeTensor(values, shape, inferredShape, dtype);\n}\n\n// node_modules/.pnpm/@tensorflow+tfjs-core@3.19.0/node_modules/@tensorflow/tfjs-core/dist/ops/browser.js\nvar fromPixels2DContext;\nfunction fromPixels_(pixels, numChannels = 3) {\n if (numChannels > 4) {\n throw new Error(\"Cannot construct Tensor with more than 4 channels from pixels.\");\n }\n if (pixels == null) {\n throw new Error(\"pixels passed to tf.browser.fromPixels() 
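// Editor's sketch (illustrative only, not part of the generated bundle):
// confusionMatrix() above one-hot encodes labels and predictions and multiplies
// them, so entry [i][j] counts samples of true class i predicted as class j.
// Assumes @tensorflow/tfjs-core 3.19.0 as `tf`.
const labels = tf.tensor1d([0, 1, 2, 2], 'int32');
const predictions = tf.tensor1d([0, 2, 2, 2], 'int32');
tf.math.confusionMatrix(labels, predictions, 3).print();
// expected output:
// [[1, 0, 0],
//  [0, 0, 1],
//  [0, 0, 2]]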
can not be null\");\n }\n let isPixelData2 = false;\n let isImageData = false;\n let isVideo = false;\n let isImage = false;\n let isCanvasLike = false;\n let isImageBitmap = false;\n if (pixels.data instanceof Uint8Array) {\n isPixelData2 = true;\n } else if (typeof ImageData !== \"undefined\" && pixels instanceof ImageData) {\n isImageData = true;\n } else if (typeof HTMLVideoElement !== \"undefined\" && pixels instanceof HTMLVideoElement) {\n isVideo = true;\n } else if (typeof HTMLImageElement !== \"undefined\" && pixels instanceof HTMLImageElement) {\n isImage = true;\n } else if (pixels.getContext != null) {\n isCanvasLike = true;\n } else if (typeof ImageBitmap !== \"undefined\" && pixels instanceof ImageBitmap) {\n isImageBitmap = true;\n } else {\n throw new Error(`pixels passed to tf.browser.fromPixels() must be either an HTMLVideoElement, HTMLImageElement, HTMLCanvasElement, ImageData in browser, or OffscreenCanvas, ImageData in webworker or {data: Uint32Array, width: number, height: number}, but was ${pixels.constructor.name}`);\n }\n if (isVideo) {\n const HAVE_CURRENT_DATA_READY_STATE = 2;\n if (isVideo && pixels.readyState < HAVE_CURRENT_DATA_READY_STATE) {\n throw new Error(\"The video element has not loaded data yet. Please wait for `loadeddata` event on the
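// Editor's sketch (illustrative only, not part of the generated bundle):
// fromPixels_() above accepts ImageData, HTMLImageElement, HTMLVideoElement,
// canvas-like objects, ImageBitmap, or raw {data, width, height} pixel data.
// The element id is hypothetical; assumes a browser page with the full
// @tensorflow/tfjs 3.19.0 bundle as `tf`.
const img = document.getElementById('source-image');   // must be loaded and same-origin
const rgb = tf.browser.fromPixels(img, 3);              // int32 tensor of shape [height, width, 3]
console.log(rgb.shape, rgb.dtype);
rgb.dispose();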