add loaded property to model stats and correctly mark models that are not loaded.

Signed-off-by: Vladimir Mandic <mandic00@live.com>
branch: main
author: Vladimir Mandic, 2024-10-14 09:04:10 -04:00
parent: df73c8247f
commit: 49b25830b4
18 changed files with 60916 additions and 191 deletions

File diffs suppressed for 4 files because one or more lines are too long


@@ -4,6 +4,105 @@
author: <https://github.com/vladmandic>'
*/
import*as m from"../../dist/human.esm.js";var v=1920,b={debug:!0,backend:"webgl",modelBasePath:"https://vladmandic.github.io/human-models/models/",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;e.draw.options.drawPoints=!0;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},i=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var d,r,c;if(!a.video.paused){let l=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let p={bodyLabels:`person confidence [score] and ${(c=(r=(d=e.result)==null?void 0:d.body)==null?void 0:r[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,p),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,i(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function h(){let d=(await e.webcam.enumerate())[0].deviceId,r=await e.webcam.start({element:a.video,crop:!1,width:v,id:d});o(r),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),i("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",e.models.loaded()),o("environment",e.env),i("initializing..."),await e.warmup(),await h(),await f(),await u()}window.onload=y;
// demo/typescript/index.ts
import * as H from "../../dist/human.esm.js";
var width = 1920;
var humanConfig = {
// user configuration for human, used to fine-tune behavior
debug: true,
backend: "webgl",
// cacheSensitivity: 0,
// cacheModels: false,
// warmup: 'none',
// modelBasePath: '../../models',
modelBasePath: "https://vladmandic.github.io/human-models/models/",
filter: { enabled: true, equalization: false, flip: false },
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
body: { enabled: false },
hand: { enabled: false },
object: { enabled: false },
segmentation: { enabled: false },
gesture: { enabled: true }
};
var human = new H.Human(humanConfig);
human.env.perfadd = false;
human.draw.options.font = 'small-caps 18px "Lato"';
human.draw.options.lineHeight = 20;
human.draw.options.drawPoints = true;
var dom = {
// grab instances of dom objects so we dont have to look them up later
video: document.getElementById("video"),
canvas: document.getElementById("canvas"),
log: document.getElementById("log"),
fps: document.getElementById("status"),
perf: document.getElementById("performance")
};
var timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 };
var fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 };
var log = (...msg) => {
dom.log.innerText += msg.join(" ") + "\n";
console.log(...msg);
};
var status = (msg) => dom.fps.innerText = msg;
var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors.toString() + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
async function detectionLoop() {
if (!dom.video.paused) {
if (timestamp.start === 0) timestamp.start = human.now();
await human.detect(dom.video);
const tensors = human.tf.memory().numTensors;
if (tensors - timestamp.tensors !== 0) log("allocated tensors:", tensors - timestamp.tensors);
timestamp.tensors = tensors;
fps.detectFPS = Math.round(1e3 * 1e3 / (human.now() - timestamp.detect)) / 1e3;
fps.frames++;
fps.averageMs = Math.round(1e3 * (human.now() - timestamp.start) / fps.frames) / 1e3;
if (fps.frames % 100 === 0 && !dom.video.paused) log("performance", { ...fps, tensors: timestamp.tensors });
}
timestamp.detect = human.now();
requestAnimationFrame(detectionLoop);
}
async function drawLoop() {
var _a, _b, _c;
if (!dom.video.paused) {
const interpolated = human.next(human.result);
const processed = await human.image(dom.video);
human.draw.canvas(processed.canvas, dom.canvas);
const opt = { bodyLabels: `person confidence [score] and ${(_c = (_b = (_a = human.result) == null ? void 0 : _a.body) == null ? void 0 : _b[0]) == null ? void 0 : _c.keypoints.length} keypoints` };
await human.draw.all(dom.canvas, interpolated, opt);
perf(interpolated.performance);
}
const now = human.now();
fps.drawFPS = Math.round(1e3 * 1e3 / (now - timestamp.draw)) / 1e3;
timestamp.draw = now;
status(dom.video.paused ? "paused" : `fps: ${fps.detectFPS.toFixed(1).padStart(5, " ")} detect | ${fps.drawFPS.toFixed(1).padStart(5, " ")} draw`);
setTimeout(drawLoop, 30);
}
async function webCam() {
const devices = await human.webcam.enumerate();
const id = devices[0].deviceId;
const webcamStatus = await human.webcam.start({ element: dom.video, crop: false, width, id });
log(webcamStatus);
dom.canvas.width = human.webcam.width;
dom.canvas.height = human.webcam.height;
dom.canvas.onclick = async () => {
if (human.webcam.paused) await human.webcam.play();
else human.webcam.pause();
};
}
async function main() {
log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]);
log("platform:", human.env.platform, "| agent:", human.env.agent);
status("loading...");
await human.load();
log("backend:", human.tf.getBackend(), "| available:", human.env.backends);
log("models stats:", human.models.stats());
log("models loaded:", human.models.loaded());
log("environment", human.env);
status("initializing...");
await human.warmup();
await webCam();
await detectionLoop();
await drawLoop();
}
window.onload = main;
//# sourceMappingURL=index.js.map
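
For reference, a minimal sketch of how the commit's new loaded bookkeeping surfaces through the demo's existing calls. It assumes the `human` instance configured above and that `human.models.loaded()` returns the list of loaded model names, as the demo's log line suggests; the stats() field names are taken from the Models.stats() diff further below.

// sketch only: numLoadedModels now counts models whose per-model `loaded` flag is true,
// so it can be lower than the number of load attempts when a model fails to load
const stats = human.models.stats();
log('models defined:', stats.numDefinedModels, '| models loaded:', stats.numLoadedModels);
log('weights loaded:', `${Math.round(100 * stats.percentageLoaded)}%`);
if (human.models.loaded().length === 0) status('no models loaded');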

File diffs suppressed for 2 files because one or more lines are too long

dist/human.esm.js (vendored): 39 lines changed

@@ -536,6 +536,7 @@ var AG = (r15, e, t10, o) => {
};
var zp = (r15, e, t10) => (t10 = r15 != null ? _G(RG(r15)) : {}, AG(e || !r15 || !r15.__esModule ? QC(t10, "default", { value: r15, enumerable: true }) : t10, r15));
var U0 = Kt((ple, W0) => {
"use strict";
W0.exports = kt;
var ko = null;
try {
@@ -826,10 +827,13 @@ var U0 = Kt((ple, W0) => {
};
});
var Ek = Kt(() => {
"use strict";
});
var $k = Kt(() => {
"use strict";
});
var o1 = Kt((r1, Ww) => {
"use strict";
(function(r15, e, t10) {
function o(i) {
var p = this, u = a();
@@ -869,6 +873,7 @@ var o1 = Kt((r1, Ww) => {
})(r1, typeof Ww == "object" && Ww, typeof define == "function" && define);
});
var s1 = Kt((n1, Uw) => {
"use strict";
(function(r15, e, t10) {
function o(a) {
var i = this, p = "";
@@ -900,6 +905,7 @@ var s1 = Kt((n1, Uw) => {
})(n1, typeof Uw == "object" && Uw, typeof define == "function" && define);
});
var i1 = Kt((a1, Gw) => {
"use strict";
(function(r15, e, t10) {
function o(a) {
var i = this, p = "";
@@ -931,6 +937,7 @@ var i1 = Kt((a1, Gw) => {
})(a1, typeof Gw == "object" && Gw, typeof define == "function" && define);
});
var p1 = Kt((u1, Hw) => {
"use strict";
(function(r15, e, t10) {
function o(a) {
var i = this;
@@ -971,6 +978,7 @@ var p1 = Kt((u1, Hw) => {
})(u1, typeof Hw == "object" && Hw, typeof define == "function" && define);
});
var l1 = Kt((c1, Kw) => {
"use strict";
(function(r15, e, t10) {
function o(a) {
var i = this;
@@ -1009,6 +1017,7 @@ var l1 = Kt((c1, Kw) => {
})(c1, typeof Kw == "object" && Kw, typeof define == "function" && define);
});
var d1 = Kt((m1, qw) => {
"use strict";
(function(r15, e, t10) {
function o(a) {
var i = this, p = "";
@@ -1040,8 +1049,10 @@ var d1 = Kt((m1, qw) => {
})(m1, typeof qw == "object" && qw, typeof define == "function" && define);
});
var f1 = Kt(() => {
"use strict";
});
var g1 = Kt((h1, Md) => {
"use strict";
(function(r15, e, t10) {
var o = 256, n = 6, s = 52, a = "random", i = t10.pow(o, n), p = t10.pow(2, s), u = p * 2, c = o - 1, l;
function m(C, S, k) {
@@ -1110,6 +1121,7 @@ var g1 = Kt((h1, Md) => {
})(typeof self != "undefined" ? self : h1, [], Math);
});
var jw = Kt((s1e, x1) => {
"use strict";
var Dq = o1(), Aq = s1(), Fq = i1(), Pq = p1(), Oq = l1(), Mq = d1(), Ku = g1();
Ku.alea = Dq;
Ku.xor128 = Aq;
@@ -1120,16 +1132,22 @@ var jw = Kt((s1e, x1) => {
x1.exports = Ku;
});
var Vv = Kt(() => {
"use strict";
});
var Wv = Kt(() => {
"use strict";
});
var LB = Kt(() => {
"use strict";
});
var BB = Kt(() => {
"use strict";
});
var zB = Kt(() => {
"use strict";
});
var VB = Kt((Wg, Gv) => {
"use strict";
var Uv = (() => {
var r15 = typeof document != "undefined" && document.currentScript ? document.currentScript.src : void 0;
return typeof __filename != "undefined" && (r15 = r15 || __filename), function(e) {
@@ -2088,10 +2106,12 @@ var VB = Kt((Wg, Gv) => {
}) : typeof Wg == "object" && (Wg.WasmBackendModuleThreadedSimd = Uv);
});
var UB = Kt((e3t, WB) => {
"use strict";
WB.exports.wasmWorkerContents = `"use strict";var Module={};var ENVIRONMENT_IS_NODE=typeof process=="object"&&typeof process.versions=="object"&&typeof process.versions.node=="string";if(ENVIRONMENT_IS_NODE){var nodeWorkerThreads=require("worker_threads");var parentPort=nodeWorkerThreads.parentPort;parentPort.on("message",data=>onmessage({data:data}));var fs=require("fs");Object.assign(global,{self:global,require:require,Module:Module,location:{href:__filename},Worker:nodeWorkerThreads.Worker,importScripts:function(f){(0,eval)(fs.readFileSync(f,"utf8")+"//# sourceURL="+f)},postMessage:function(msg){parentPort.postMessage(msg)},performance:global.performance||{now:function(){return Date.now()}}})}var initializedJS=false;var pendingNotifiedProxyingQueues=[];function threadPrintErr(){var text=Array.prototype.slice.call(arguments).join(" ");if(ENVIRONMENT_IS_NODE){fs.writeSync(2,text+"
");return}console.error(text)}function threadAlert(){var text=Array.prototype.slice.call(arguments).join(" ");postMessage({cmd:"alert",text:text,threadId:Module["_pthread_self"]()})}var err=threadPrintErr;self.alert=threadAlert;Module["instantiateWasm"]=(info,receiveInstance)=>{var instance=new WebAssembly.Instance(Module["wasmModule"],info);receiveInstance(instance);Module["wasmModule"]=null;return instance.exports};self.onunhandledrejection=e=>{throw e.reason??e};self.startWorker=instance=>{Module=instance;postMessage({"cmd":"loaded"})};self.onmessage=e=>{try{if(e.data.cmd==="load"){Module["wasmModule"]=e.data.wasmModule;for(const handler of e.data.handlers){Module[handler]=function(){postMessage({cmd:"callHandler",handler:handler,args:[...arguments]})}}Module["wasmMemory"]=e.data.wasmMemory;Module["buffer"]=Module["wasmMemory"].buffer;Module["ENVIRONMENT_IS_PTHREAD"]=true;if(typeof e.data.urlOrBlob=="string"){importScripts(e.data.urlOrBlob)}else{var objectUrl=URL.createObjectURL(e.data.urlOrBlob);importScripts(objectUrl);URL.revokeObjectURL(objectUrl)}WasmBackendModuleThreadedSimd(Module)}else if(e.data.cmd==="run"){Module["__emscripten_thread_init"](e.data.pthread_ptr,0,0,1);Module["establishStackSpace"]();Module["PThread"].receiveObjectTransfer(e.data);Module["PThread"].threadInitTLS();if(!initializedJS){pendingNotifiedProxyingQueues.forEach(queue=>{Module["executeNotifiedProxyingQueue"](queue)});pendingNotifiedProxyingQueues=[];initializedJS=true}try{Module["invokeEntryPoint"](e.data.start_routine,e.data.arg)}catch(ex){if(ex!="unwind"){if(ex instanceof Module["ExitStatus"]){if(Module["keepRuntimeAlive"]()){}else{Module["__emscripten_thread_exit"](ex.status)}}else{throw ex}}}}else if(e.data.cmd==="cancel"){if(Module["_pthread_self"]()){Module["__emscripten_thread_exit"](-1)}}else if(e.data.target==="setimmediate"){}else if(e.data.cmd==="processProxyingQueue"){if(initializedJS){Module["executeNotifiedProxyingQueue"](e.data.queue)}else{pendingNotifiedProxyingQueues.push(e.data.queue)}}else if(e.data.cmd){err("worker.js received unknown command "+e.data.cmd);err(e.data)}}catch(ex){if(Module["__emscripten_thread_crashed"]){Module["__emscripten_thread_crashed"]()}throw ex}};`;
});
var GB = Kt((Ug, Kv) => {
"use strict";
var Hv = (() => {
var r15 = typeof document != "undefined" && document.currentScript ? document.currentScript.src : void 0;
return typeof __filename != "undefined" && (r15 = r15 || __filename), function(e) {
@@ -33502,7 +33522,7 @@ function setModelLoadOptions(config3) {
options.modelBasePath = config3.modelBasePath;
}
async function loadModel(modelPath) {
var _a, _b, _c2, _d2;
var _a, _b, _c2, _d2, _e, _f2;
let modelUrl = join(options.modelBasePath, modelPath || "");
if (!modelUrl.toLowerCase().endsWith(".json")) modelUrl += ".json";
const modelPathSegments = modelUrl.includes("/") ? modelUrl.split("/") : modelUrl.split("\\");
@@ -33510,6 +33530,7 @@ async function loadModel(modelPath) {
const cachedModelName = "indexeddb://" + shortModelName;
modelStats[shortModelName] = {
name: shortModelName,
loaded: false,
sizeFromManifest: 0,
sizeLoadedWeights: 0,
sizeDesired: models_exports[shortModelName],
@@ -33527,7 +33548,7 @@ async function loadModel(modelPath) {
modelStats[shortModelName].url = modelStats[shortModelName].inCache ? cachedModelName : modelUrl;
const tfLoadOptions = typeof fetch === "undefined" ? {} : { fetchFunc: (url, init4) => httpHandler(url, init4) };
let model23 = new Bl(modelStats[shortModelName].url, tfLoadOptions);
let loaded = false;
modelStats[shortModelName].loaded = false;
try {
model23.findIOHandler();
if (options.debug) log("model load handler:", model23["handler"]);
@@ -33539,13 +33560,13 @@ async function loadModel(modelPath) {
modelStats[shortModelName].sizeFromManifest = ((_b = artifacts == null ? void 0 : artifacts.weightData) == null ? void 0 : _b.byteLength) || 0;
if (artifacts) model23.loadSync(artifacts);
else model23 = await M8(modelStats[shortModelName].inCache ? cachedModelName : modelUrl, tfLoadOptions);
modelStats[shortModelName].sizeLoadedWeights = ((_d2 = (_c2 = model23.artifacts) == null ? void 0 : _c2.weightData) == null ? void 0 : _d2.byteLength) || 0;
modelStats[shortModelName].sizeLoadedWeights = ((_d2 = (_c2 = model23.artifacts) == null ? void 0 : _c2.weightData) == null ? void 0 : _d2.byteLength) || ((_f2 = (_e = model23.artifacts) == null ? void 0 : _e.weightData) == null ? void 0 : _f2[0].byteLength) || 0;
if (options.verbose) log("load:", { model: shortModelName, url: model23["modelUrl"], bytes: modelStats[shortModelName].sizeLoadedWeights });
loaded = true;
modelStats[shortModelName].loaded = true;
} catch (err) {
log("error loading model:", modelUrl, err);
}
if (loaded && options.cacheModels && options.cacheSupported && !modelStats[shortModelName].inCache) {
if (modelStats[shortModelName].loaded && options.cacheModels && options.cacheSupported && !modelStats[shortModelName].inCache) {
try {
const saveResult = await model23.save(cachedModelName);
if (options.debug) log("model saved:", cachedModelName, saveResult);
@@ -45381,13 +45402,13 @@ var Models = class {
let totalSizeWeights = 0;
let totalSizeLoading = 0;
for (const m of Object.values(modelStats)) {
totalSizeFromManifest += m.sizeFromManifest;
totalSizeWeights += m.sizeLoadedWeights;
totalSizeLoading += m.sizeDesired;
totalSizeFromManifest += Number.isNaN(+m.sizeFromManifest) ? 0 : m.sizeFromManifest;
totalSizeWeights += Number.isNaN(+m.sizeLoadedWeights) ? 0 : m.sizeLoadedWeights;
totalSizeLoading += Number.isNaN(+m.sizeDesired) ? 0 : m.sizeDesired;
}
const percentageLoaded = totalSizeLoading > 0 ? totalSizeWeights / totalSizeLoading : 0;
return {
numLoadedModels: Object.values(modelStats).length,
numLoadedModels: Object.values(modelStats).filter((m) => m == null ? void 0 : m.loaded).length,
numDefinedModels: Object.keys(this.models).length,
percentageLoaded,
totalSizeFromManifest,

File diff suppressed because one or more lines are too long

dist/human.js (vendored, 140 lines changed): file diff suppressed because one or more lines are too long

dist/human.node-gpu.js (vendored, 14400 lines changed): file diff suppressed because one or more lines are too long

dist/human.node-wasm.js (vendored, 14402 lines changed): file diff suppressed because one or more lines are too long

dist/human.node.js (vendored, 14400 lines changed): file diff suppressed because one or more lines are too long

dist/tfjs.esm.js (vendored, 6 lines changed): file diff suppressed because one or more lines are too long

dist/tfjs.version.js (vendored): 35 lines changed

@@ -4,4 +4,37 @@
author: <https://github.com/vladmandic>'
*/
var e="4.21.0";var s="4.21.0";var t="4.21.0";var n="4.21.0";var r="4.21.0";var i="4.21.0";var h={tfjs:e,"tfjs-core":e,"tfjs-converter":s,"tfjs-backend-cpu":t,"tfjs-backend-webgl":n,"tfjs-backend-wasm":r,"tfjs-backend-webgpu":i};export{h as version};
// node_modules/.pnpm/@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-core/package.json
var version = "4.21.0";
// node_modules/.pnpm/@tensorflow+tfjs-converter@4.21.0_@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-converter/package.json
var version2 = "4.21.0";
// node_modules/.pnpm/@tensorflow+tfjs-backend-cpu@4.21.0_@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-backend-cpu/package.json
var version3 = "4.21.0";
// node_modules/.pnpm/@tensorflow+tfjs-backend-webgl@4.21.0_@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-backend-webgl/package.json
var version4 = "4.21.0";
// node_modules/.pnpm/@tensorflow+tfjs-backend-wasm@4.21.0_@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-backend-wasm/package.json
var version5 = "4.21.0";
// node_modules/.pnpm/@tensorflow+tfjs-backend-webgpu@4.21.0_@tensorflow+tfjs-core@4.21.0/node_modules/@tensorflow/tfjs-backend-webgpu/package.json
var version6 = "4.21.0";
// tfjs/tf-version.ts
var version7 = {
tfjs: version,
"tfjs-core": version,
// 'tfjs-data': tfjsDataVersion,
// 'tfjs-layers': tfjsLayersVersion,
"tfjs-converter": version2,
"tfjs-backend-cpu": version3,
"tfjs-backend-webgl": version4,
"tfjs-backend-wasm": version5,
"tfjs-backend-webgpu": version6
};
export {
version7 as version
};
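
The rebuilt dist/tfjs.version.js only re-exports the pinned package versions shown above; a small sketch of consuming it (the import path is assumed, not taken from the repo):

// sketch: read the bundled tfjs component versions (import path assumed)
import { version } from './dist/tfjs.version.js';
console.log(version['tfjs-core']);          // "4.21.0"
console.log(version['tfjs-backend-webgl']); // "4.21.0"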


@@ -100,13 +100,13 @@ export class Models {
let totalSizeWeights = 0;
let totalSizeLoading = 0;
for (const m of Object.values(modelStats)) {
totalSizeFromManifest += m.sizeFromManifest;
totalSizeWeights += m.sizeLoadedWeights;
totalSizeLoading += m.sizeDesired;
totalSizeFromManifest += Number.isNaN(+m.sizeFromManifest) ? 0 : m.sizeFromManifest;
totalSizeWeights += Number.isNaN(+m.sizeLoadedWeights) ? 0 : m.sizeLoadedWeights;
totalSizeLoading += Number.isNaN(+m.sizeDesired) ? 0 : m.sizeDesired;
}
const percentageLoaded = totalSizeLoading > 0 ? totalSizeWeights / totalSizeLoading : 0;
return {
numLoadedModels: Object.values(modelStats).length,
numLoadedModels: Object.values(modelStats).filter((m) => m?.loaded).length,
numDefinedModels: Object.keys(this.models).length,
percentageLoaded,
totalSizeFromManifest,
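
The Number.isNaN(+value) guards above keep an undefined or non-numeric size (for example, a model with no known desired size) from turning the totals into NaN; the same pattern as a standalone helper (name is illustrative, not from the source):

// sketch: sum sizes while treating undefined or non-numeric entries as 0
function safeSum(values: Array<number | undefined>): number {
  let total = 0;
  for (const v of values) total += Number.isNaN(+(v as number)) ? 0 : (v as number);
  return total;
}
safeSum([1024, undefined, 2048]); // 3072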


@@ -14,6 +14,7 @@ const options = {
export interface ModelInfo {
name: string,
loaded: boolean,
inCache: boolean,
sizeDesired: number,
sizeFromManifest: number,
@@ -42,6 +43,7 @@ export async function loadModel(modelPath: string | undefined): Promise<GraphMod
const cachedModelName = 'indexeddb://' + shortModelName; // generate short model name for cache
modelStats[shortModelName] = {
name: shortModelName,
loaded: false,
sizeFromManifest: 0,
sizeLoadedWeights: 0,
sizeDesired: modelsDefs[shortModelName],
@@ -59,7 +61,7 @@ export async function loadModel(modelPath: string | undefined): Promise<GraphMod
modelStats[shortModelName].url = modelStats[shortModelName].inCache ? cachedModelName : modelUrl;
const tfLoadOptions = typeof fetch === 'undefined' ? {} : { fetchFunc: (url: string, init?: RequestInit) => httpHandler(url, init) };
let model: GraphModel = new tf.GraphModel(modelStats[shortModelName].url, tfLoadOptions) as unknown as GraphModel; // create model prototype and decide if load from cache or from original modelurl
let loaded = false;
modelStats[shortModelName].loaded = false;
try {
// @ts-ignore private function
model.findIOHandler(); // decide how to actually load a model
@@ -74,13 +76,13 @@ export async function loadModel(modelPath: string | undefined): Promise<GraphMod
if (artifacts) model.loadSync(artifacts); // load weights
else model = await tf.loadGraphModel(modelStats[shortModelName].inCache ? cachedModelName : modelUrl, tfLoadOptions) as unknown as GraphModel;
// @ts-ignore private property
modelStats[shortModelName].sizeLoadedWeights = model.artifacts?.weightData?.byteLength || 0;
modelStats[shortModelName].sizeLoadedWeights = model.artifacts?.weightData?.byteLength || model.artifacts?.weightData?.[0].byteLength || 0;
if (options.verbose) log('load:', { model: shortModelName, url: model['modelUrl'], bytes: modelStats[shortModelName].sizeLoadedWeights });
loaded = true;
modelStats[shortModelName].loaded = true;
} catch (err) {
log('error loading model:', modelUrl, err);
}
if (loaded && options.cacheModels && options.cacheSupported && !modelStats[shortModelName].inCache) { // save model to cache
if (modelStats[shortModelName].loaded && options.cacheModels && options.cacheSupported && !modelStats[shortModelName].inCache) { // save model to cache
try {
const saveResult = await model.save(cachedModelName);
if (options.debug) log('model saved:', cachedModelName, saveResult);
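
One detail worth noting in the loadModel() change above: in newer tfjs releases, ModelArtifacts.weightData may be either a single ArrayBuffer or an array of ArrayBuffers, and the new `?.[0].byteLength` fallback reports only the first chunk. A hedged alternative sketch that sums every chunk (not the source's implementation):

// sketch: total weight size whether weightData is one buffer or an array of buffers
function weightBytes(weightData?: ArrayBuffer | ArrayBuffer[]): number {
  if (!weightData) return 0;
  if (Array.isArray(weightData)) return weightData.reduce((total, chunk) => total + chunk.byteLength, 0);
  return weightData.byteLength;
}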

File diff suppressed because it is too large