diff --git a/CHANGELOG.md b/CHANGELOG.md
index a801abc8..bd107bae 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -9,7 +9,10 @@
 
 ## Changelog
 
-### **HEAD -> main** 2022/11/16 mandic00@live.com
+### **HEAD -> main** 2022/11/17 mandic00@live.com
+
+
+### **origin/main** 2022/11/16 mandic00@live.com
 
 - added webcam id specification
 - include external typedefs
diff --git a/README.md b/README.md
index 17ec465d..59487302 100644
--- a/README.md
+++ b/README.md
@@ -68,7 +68,7 @@
 - **Full** [[*Live*]](https://vladmandic.github.io/human/demo/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo): Main browser demo app that showcases all Human capabilities
 - **Simple** [[*Live*]](https://vladmandic.github.io/human/demo/typescript/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo/typescript): Simple demo in WebCam processing demo in TypeScript
 - **Embedded** [[*Live*]](https://vladmandic.github.io/human/demo/video/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/video/index.html): Even simpler demo with tiny code embedded in HTML file
-- **Face Match** [[*Live*]](https://vladmandic.github.io/human/demo/facematch/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo/facematch): Extract faces from images, calculates face descriptors and simmilarities and matches them to known database
+- **Face Match** [[*Live*]](https://vladmandic.github.io/human/demo/facematch/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo/facematch): Extract faces from images, calculates face descriptors and similarities and matches them to known database
 - **Face ID** [[*Live*]](https://vladmandic.github.io/human/demo/faceid/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo/faceid): Runs multiple checks to validate webcam input before performing face match to faces in IndexDB
 - **Multi-thread** [[*Live*]](https://vladmandic.github.io/human/demo/multithread/index.html) [[*Details*]](https://github.com/vladmandic/human/tree/main/demo/multithread): Runs each Human module in a separate web worker for highest possible performance
 - **NextJS** [[*Live*]](https://vladmandic.github.io/human-next/out/index.html) [[*Details*]](https://github.com/vladmandic/human-next): Use Human with TypeScript, NextJS and ReactJS
@@ -377,6 +377,16 @@ drawResults(); // start draw loop
 
 And for even better results, you can run detection in a separate web worker thread
 
+


+
+## Detailed Usage
+
+- [**Wiki Home**](https://github.com/vladmandic/human/wiki)
+- [**List of all available methods, properties and namespaces**](https://github.com/vladmandic/human/wiki/Usage)
+- [**TypeDoc API Specification - Main class**](https://vladmandic.github.io/human/typedoc/classes/Human.html)
+- [**TypeDoc API Specification - Full**](https://vladmandic.github.io/human/typedoc/)
+
+


 ## TypeDefs
diff --git a/TODO.md b/TODO.md
index 833a8d19..be48b6a4 100644
--- a/TODO.md
+++ b/TODO.md
@@ -80,6 +80,15 @@ Architecture:
 - Upgrade to **TFJS 4.0** with **strong typing** see [notes](https://github.com/vladmandic/human#typedefs) on how to use
 - `TypeDef` refactoring
+- Re-architect `human.models` namespace for better dynamic model handling
+  Added additional methods `load`, `list`, `loaded`, `reset`
 - Add named export for improved bundler support when using non-default imports
-- Support for `NodeJS` v19
+- Support for **NodeJS v19**
 - Upgrade to **TypeScript 4.9**
+
+Breaking changes:
+- Replaced `result.face[n].iris` with `result.face[n].distance`
+- Replaced `human.getModelStats()` with `human.models.stats()`
+- Moved `human.similarity`, `human.distance` and `human.match` to namespace `human.match.*`
+- Obsolete `human.enhance()`
+- Obsolete `human.gl`
diff --git a/demo/faceid/index.js b/demo/faceid/index.js
index 3d18d96e..635d01de 100644
--- a/demo/faceid/index.js
+++ b/demo/faceid/index.js
@@ -4,359 +4,6 @@ author: '
 */
-
-// demo/faceid/index.ts
-import * as H from "../../dist/human.esm.js";
-
-// demo/faceid/indexdb.ts
-var db;
-var database = "human";
-var table = "person";
-var log = (...msg) => console.log("indexdb", ...msg);
-async function open() {
-  if (db)
-    return true;
-  return new Promise((resolve) => {
-    const request = indexedDB.open(database, 1);
-    request.onerror = (evt) => log("error:", evt);
-    request.onupgradeneeded = (evt) => {
-      log("create:", evt.target);
-      db = evt.target.result;
-      db.createObjectStore(table, { keyPath: "id", autoIncrement: true });
-    };
-    request.onsuccess = (evt) => {
-      db = evt.target.result;
-      log("open:", db);
-      resolve(true);
-    };
-  });
-}
-async function load() {
-  const faceDB = [];
-  if (!db)
-    await open();
-  return new Promise((resolve) => {
-    const cursor = db.transaction([table], "readwrite").objectStore(table).openCursor(null, "next");
-    cursor.onerror = (evt) => log("load error:", evt);
-    cursor.onsuccess = (evt) => {
-      if (evt.target.result) {
-        faceDB.push(evt.target.result.value);
-        evt.target.result.continue();
-      } else {
-        resolve(faceDB);
-      }
-    };
-  });
-}
-async function count() {
-  if (!db)
-    await open();
-  return new Promise((resolve) => {
-    const store = db.transaction([table], "readwrite").objectStore(table).count();
-    store.onerror = (evt) => log("count error:", evt);
-    store.onsuccess = () => resolve(store.result);
-  });
-}
-async function save(faceRecord) {
-  if (!db)
-    await open();
-  const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image };
-  db.transaction([table], "readwrite").objectStore(table).put(newRecord);
-  log("save:", newRecord);
-}
-async function remove(faceRecord) {
-  if (!db)
-    await open();
-  db.transaction([table], "readwrite").objectStore(table).delete(faceRecord.id);
-  log("delete:", faceRecord);
-}
-
-// demo/faceid/index.ts
-var humanConfig = {
-  cacheSensitivity: 0,
-  modelBasePath: "../../models",
-  filter: { enabled: true, equalization: true },
-  debug: true,
-  face: {
-    enabled: true,
-    detector: { rotation: true, return: true, cropFactor: 1.6, mask: false },
-    description: { enabled: true },
-    iris: { enabled: true },
-    emotion: { enabled: false },
-    antispoof: { enabled: true },
-    liveness: { enabled: true }
-  },
-  body: { enabled: false },
-  hand: { enabled: false },
-  object: { enabled: false },
-  gesture: { enabled: true }
-};
-var matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 };
-var options = {
-
minConfidence: 0.6, - minSize: 224, - maxTime: 3e4, - blinkMin: 10, - blinkMax: 800, - threshold: 0.5, - distanceMin: 0.4, - distanceMax: 1, - mask: humanConfig.face.detector.mask, - rotation: humanConfig.face.detector.rotation, - cropFactor: humanConfig.face.detector.cropFactor, - ...matchOptions -}; -var ok = { - faceCount: { status: false, val: 0 }, - faceConfidence: { status: false, val: 0 }, - facingCenter: { status: false, val: 0 }, - lookingCenter: { status: false, val: 0 }, - blinkDetected: { status: false, val: 0 }, - faceSize: { status: false, val: 0 }, - antispoofCheck: { status: false, val: 0 }, - livenessCheck: { status: false, val: 0 }, - distance: { status: false, val: 0 }, - age: { status: false, val: 0 }, - gender: { status: false, val: 0 }, - timeout: { status: true, val: 0 }, - descriptor: { status: false, val: 0 }, - elapsedMs: { status: void 0, val: 0 }, - detectFPS: { status: void 0, val: 0 }, - drawFPS: { status: void 0, val: 0 } -}; -var allOk = () => ok.faceCount.status && ok.faceSize.status && ok.blinkDetected.status && ok.facingCenter.status && ok.lookingCenter.status && ok.faceConfidence.status && ok.antispoofCheck.status && ok.livenessCheck.status && ok.distance.status && ok.descriptor.status && ok.age.status && ok.gender.status; -var current = { face: null, record: null }; -var blink = { - start: 0, - end: 0, - time: 0 -}; -var human = new H.Human(humanConfig); -human.env.perfadd = false; -human.draw.options.font = 'small-caps 18px "Lato"'; -human.draw.options.lineHeight = 20; -var dom = { - video: document.getElementById("video"), - canvas: document.getElementById("canvas"), - log: document.getElementById("log"), - fps: document.getElementById("fps"), - match: document.getElementById("match"), - name: document.getElementById("name"), - save: document.getElementById("save"), - delete: document.getElementById("delete"), - retry: document.getElementById("retry"), - source: document.getElementById("source"), - ok: document.getElementById("ok") -}; -var timestamp = { detect: 0, draw: 0 }; -var startTime = 0; -var log2 = (...msg) => { - dom.log.innerText += msg.join(" ") + "\n"; - console.log(...msg); -}; -async function webCam() { - const cameraOptions = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } }; - const stream = await navigator.mediaDevices.getUserMedia(cameraOptions); - const ready = new Promise((resolve) => { - dom.video.onloadeddata = () => resolve(true); - }); - dom.video.srcObject = stream; - void dom.video.play(); - await ready; - dom.canvas.width = dom.video.videoWidth; - dom.canvas.height = dom.video.videoHeight; - dom.canvas.style.width = "50%"; - dom.canvas.style.height = "50%"; - if (human.env.initial) - log2("video:", dom.video.videoWidth, dom.video.videoHeight, "|", stream.getVideoTracks()[0].label); - dom.canvas.onclick = () => { - if (dom.video.paused) - void dom.video.play(); - else - dom.video.pause(); - }; -} -async function detectionLoop() { - var _a; - if (!dom.video.paused) { - if ((_a = current.face) == null ? 
void 0 : _a.tensor) - human.tf.dispose(current.face.tensor); - await human.detect(dom.video); - const now = human.now(); - ok.detectFPS.val = Math.round(1e4 / (now - timestamp.detect)) / 10; - timestamp.detect = now; - requestAnimationFrame(detectionLoop); - } -} -function drawValidationTests() { - let y = 32; - for (const [key, val] of Object.entries(ok)) { - let el = document.getElementById(`ok-${key}`); - if (!el) { - el = document.createElement("div"); - el.id = `ok-${key}`; - el.innerText = key; - el.className = "ok"; - el.style.top = `${y}px`; - dom.ok.appendChild(el); - } - if (typeof val.status === "boolean") - el.style.backgroundColor = val.status ? "lightgreen" : "lightcoral"; - const status = val.status ? "ok" : "fail"; - el.innerText = `${key}: ${val.val === 0 ? status : val.val}`; - y += 28; - } -} -async function validationLoop() { - var _a; - const interpolated = human.next(human.result); - human.draw.canvas(dom.video, dom.canvas); - await human.draw.all(dom.canvas, interpolated); - const now = human.now(); - ok.drawFPS.val = Math.round(1e4 / (now - timestamp.draw)) / 10; - timestamp.draw = now; - ok.faceCount.val = human.result.face.length; - ok.faceCount.status = ok.faceCount.val === 1; - if (ok.faceCount.status) { - const gestures = Object.values(human.result.gesture).map((gesture) => gesture.gesture); - if (gestures.includes("blink left eye") || gestures.includes("blink right eye")) - blink.start = human.now(); - if (blink.start > 0 && !gestures.includes("blink left eye") && !gestures.includes("blink right eye")) - blink.end = human.now(); - ok.blinkDetected.status = ok.blinkDetected.status || Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax; - if (ok.blinkDetected.status && blink.time === 0) - blink.time = Math.trunc(blink.end - blink.start); - ok.facingCenter.status = gestures.includes("facing center"); - ok.lookingCenter.status = gestures.includes("looking center"); - ok.faceConfidence.val = human.result.face[0].faceScore || human.result.face[0].boxScore || 0; - ok.faceConfidence.status = ok.faceConfidence.val >= options.minConfidence; - ok.antispoofCheck.val = human.result.face[0].real || 0; - ok.antispoofCheck.status = ok.antispoofCheck.val >= options.minConfidence; - ok.livenessCheck.val = human.result.face[0].live || 0; - ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence; - ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]); - ok.faceSize.status = ok.faceSize.val >= options.minSize; - ok.distance.val = human.result.face[0].distance || 0; - ok.distance.status = ok.distance.val >= options.distanceMin && ok.distance.val <= options.distanceMax; - ok.descriptor.val = ((_a = human.result.face[0].embedding) == null ? 
void 0 : _a.length) || 0; - ok.descriptor.status = ok.descriptor.val > 0; - ok.age.val = human.result.face[0].age || 0; - ok.age.status = ok.age.val > 0; - ok.gender.val = human.result.face[0].genderScore || 0; - ok.gender.status = ok.gender.val >= options.minConfidence; - } - ok.timeout.status = ok.elapsedMs.val <= options.maxTime; - drawValidationTests(); - if (allOk() || !ok.timeout.status) { - dom.video.pause(); - return human.result.face[0]; - } - ok.elapsedMs.val = Math.trunc(human.now() - startTime); - return new Promise((resolve) => { - setTimeout(async () => { - await validationLoop(); - resolve(human.result.face[0]); - }, 30); - }); -} -async function saveRecords() { - var _a, _b, _c, _d; - if (dom.name.value.length > 0) { - const image = (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, dom.canvas.width, dom.canvas.height); - const rec = { id: 0, name: dom.name.value, descriptor: (_b = current.face) == null ? void 0 : _b.embedding, image }; - await save(rec); - log2("saved face record:", rec.name, "descriptor length:", (_d = (_c = current.face) == null ? void 0 : _c.embedding) == null ? void 0 : _d.length); - log2("known face records:", await count()); - } else { - log2("invalid name"); - } -} -async function deleteRecord() { - if (current.record && current.record.id > 0) { - await remove(current.record); - } -} -async function detectFace() { - var _a, _b, _c, _d; - dom.canvas.style.height = ""; - (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.clearRect(0, 0, options.minSize, options.minSize); - if (!((_b = current == null ? void 0 : current.face) == null ? void 0 : _b.tensor) || !((_c = current == null ? void 0 : current.face) == null ? void 0 : _c.embedding)) - return false; - console.log("face record:", current.face); - log2(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${100 * (current.face.distance || 0)}cm/${Math.round(100 * (current.face.distance || 0) / 2.54)}in`); - await human.tf.browser.toPixels(current.face.tensor, dom.canvas); - if (await count() === 0) { - log2("face database is empty: nothing to compare face with"); - document.body.style.background = "black"; - dom.delete.style.display = "none"; - return false; - } - const db2 = await load(); - const descriptors = db2.map((rec) => rec.descriptor).filter((desc) => desc.length > 0); - const res = human.match(current.face.embedding, descriptors, matchOptions); - current.record = db2[res.index] || null; - if (current.record) { - log2(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1e3 * res.similarity) / 10}%`); - dom.name.value = current.record.name; - dom.source.style.display = ""; - (_d = dom.source.getContext("2d")) == null ? void 0 : _d.putImageData(current.record.image, 0, 0); - } - document.body.style.background = res.similarity > options.threshold ? 
"darkgreen" : "maroon"; - return res.similarity > options.threshold; -} -async function main() { - var _a, _b, _c, _d; - ok.faceCount.status = false; - ok.faceConfidence.status = false; - ok.facingCenter.status = false; - ok.blinkDetected.status = false; - ok.faceSize.status = false; - ok.antispoofCheck.status = false; - ok.livenessCheck.status = false; - ok.age.status = false; - ok.gender.status = false; - ok.elapsedMs.val = 0; - dom.match.style.display = "none"; - dom.retry.style.display = "none"; - dom.source.style.display = "none"; - dom.canvas.style.height = "50%"; - document.body.style.background = "black"; - await webCam(); - await detectionLoop(); - startTime = human.now(); - current.face = await validationLoop(); - dom.canvas.width = ((_b = (_a = current.face) == null ? void 0 : _a.tensor) == null ? void 0 : _b.shape[1]) || options.minSize; - dom.canvas.height = ((_d = (_c = current.face) == null ? void 0 : _c.tensor) == null ? void 0 : _d.shape[0]) || options.minSize; - dom.source.width = dom.canvas.width; - dom.source.height = dom.canvas.height; - dom.canvas.style.width = ""; - dom.match.style.display = "flex"; - dom.save.style.display = "flex"; - dom.delete.style.display = "flex"; - dom.retry.style.display = "block"; - if (!allOk()) { - log2("did not find valid face"); - return false; - } - return detectFace(); -} -async function init() { - var _a, _b; - log2("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]); - log2("options:", JSON.stringify(options).replace(/{|}|"|\[|\]/g, "").replace(/,/g, " ")); - log2("initializing webcam..."); - await webCam(); - log2("loading human models..."); - await human.load(); - log2("initializing human..."); - log2("face embedding model:", humanConfig.face.description.enabled ? "faceres" : "", ((_a = humanConfig.face["mobilefacenet"]) == null ? void 0 : _a.enabled) ? "mobilefacenet" : "", ((_b = humanConfig.face["insightface"]) == null ? void 0 : _b.enabled) ? 
"insightface" : ""); - log2("loading face database..."); - log2("known face records:", await count()); - dom.retry.addEventListener("click", main); - dom.save.addEventListener("click", saveRecords); - dom.delete.addEventListener("click", deleteRecord); - await human.warmup(); - await main(); -} -window.onload = init; +import*as S from"../../dist/human.esm.js";var l,F="human",f="person",v=(...a)=>console.log("indexdb",...a);async function h(){return l?!0:new Promise(a=>{let n=indexedDB.open(F,1);n.onerror=o=>v("error:",o),n.onupgradeneeded=o=>{v("create:",o.target),l=o.target.result,l.createObjectStore(f,{keyPath:"id",autoIncrement:!0})},n.onsuccess=o=>{l=o.target.result,v("open:",l),a(!0)}})}async function C(){let a=[];return l||await h(),new Promise(n=>{let o=l.transaction([f],"readwrite").objectStore(f).openCursor(null,"next");o.onerror=i=>v("load error:",i),o.onsuccess=i=>{i.target.result?(a.push(i.target.result.value),i.target.result.continue()):n(a)}})}async function b(){return l||await h(),new Promise(a=>{let n=l.transaction([f],"readwrite").objectStore(f).count();n.onerror=o=>v("count error:",o),n.onsuccess=()=>a(n.result)})}async function x(a){l||await h();let n={name:a.name,descriptor:a.descriptor,image:a.image};l.transaction([f],"readwrite").objectStore(f).put(n),v("save:",n)}async function D(a){l||await h(),l.transaction([f],"readwrite").objectStore(f).delete(a.id),v("delete:",a)}var g={cacheSensitivity:0,modelBasePath:"../../models",filter:{enabled:!0,equalization:!0},debug:!0,face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},B={order:2,multiplier:25,min:.2,max:.8},r={minConfidence:.6,minSize:224,maxTime:3e4,blinkMin:10,blinkMax:800,threshold:.5,distanceMin:.4,distanceMax:1,mask:g.face.detector.mask,rotation:g.face.detector.rotation,cropFactor:g.face.detector.cropFactor,...B},e={faceCount:{status:!1,val:0},faceConfidence:{status:!1,val:0},facingCenter:{status:!1,val:0},lookingCenter:{status:!1,val:0},blinkDetected:{status:!1,val:0},faceSize:{status:!1,val:0},antispoofCheck:{status:!1,val:0},livenessCheck:{status:!1,val:0},distance:{status:!1,val:0},age:{status:!1,val:0},gender:{status:!1,val:0},timeout:{status:!0,val:0},descriptor:{status:!1,val:0},elapsedMs:{status:void 0,val:0},detectFPS:{status:void 0,val:0},drawFPS:{status:void 0,val:0}},E=()=>e.faceCount.status&&e.faceSize.status&&e.blinkDetected.status&&e.facingCenter.status&&e.lookingCenter.status&&e.faceConfidence.status&&e.antispoofCheck.status&&e.livenessCheck.status&&e.distance.status&&e.descriptor.status&&e.age.status&&e.gender.status,c={face:null,record:null},u={start:0,end:0,time:0},s=new S.Human(g);s.env.perfadd=!1;s.draw.options.font='small-caps 18px "Lato"';s.draw.options.lineHeight=20;var t={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},y={detect:0,draw:0},I=0,d=(...a)=>{t.log.innerText+=a.join(" ")+` +`,console.log(...a)};async function H(){let 
a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},n=await navigator.mediaDevices.getUserMedia(a),o=new Promise(i=>{t.video.onloadeddata=()=>i(!0)});t.video.srcObject=n,t.video.play(),await o,t.canvas.width=t.video.videoWidth,t.canvas.height=t.video.videoHeight,t.canvas.style.width="50%",t.canvas.style.height="50%",s.env.initial&&d("video:",t.video.videoWidth,t.video.videoHeight,"|",n.getVideoTracks()[0].label),t.canvas.onclick=()=>{t.video.paused?t.video.play():t.video.pause()}}async function T(){var a;if(!t.video.paused){(a=c.face)!=null&&a.tensor&&s.tf.dispose(c.face.tensor),await s.detect(t.video);let n=s.now();e.detectFPS.val=Math.round(1e4/(n-y.detect))/10,y.detect=n,requestAnimationFrame(T)}}function P(){let a=32;for(let[n,o]of Object.entries(e)){let i=document.getElementById(`ok-${n}`);i||(i=document.createElement("div"),i.id=`ok-${n}`,i.innerText=n,i.className="ok",i.style.top=`${a}px`,t.ok.appendChild(i)),typeof o.status=="boolean"&&(i.style.backgroundColor=o.status?"lightgreen":"lightcoral");let m=o.status?"ok":"fail";i.innerText=`${n}: ${o.val===0?m:o.val}`,a+=28}}async function R(){var o;let a=s.next(s.result);s.draw.canvas(t.video,t.canvas),await s.draw.all(t.canvas,a);let n=s.now();if(e.drawFPS.val=Math.round(1e4/(n-y.draw))/10,y.draw=n,e.faceCount.val=s.result.face.length,e.faceCount.status=e.faceCount.val===1,e.faceCount.status){let i=Object.values(s.result.gesture).map(m=>m.gesture);(i.includes("blink left eye")||i.includes("blink right eye"))&&(u.start=s.now()),u.start>0&&!i.includes("blink left eye")&&!i.includes("blink right eye")&&(u.end=s.now()),e.blinkDetected.status=e.blinkDetected.status||Math.abs(u.end-u.start)>r.blinkMin&&Math.abs(u.end-u.start)=r.minConfidence,e.antispoofCheck.val=s.result.face[0].real||0,e.antispoofCheck.status=e.antispoofCheck.val>=r.minConfidence,e.livenessCheck.val=s.result.face[0].live||0,e.livenessCheck.status=e.livenessCheck.val>=r.minConfidence,e.faceSize.val=Math.min(s.result.face[0].box[2],s.result.face[0].box[3]),e.faceSize.status=e.faceSize.val>=r.minSize,e.distance.val=s.result.face[0].distance||0,e.distance.status=e.distance.val>=r.distanceMin&&e.distance.val<=r.distanceMax,e.descriptor.val=((o=s.result.face[0].embedding)==null?void 0:o.length)||0,e.descriptor.status=e.descriptor.val>0,e.age.val=s.result.face[0].age||0,e.age.status=e.age.val>0,e.gender.val=s.result.face[0].genderScore||0,e.gender.status=e.gender.val>=r.minConfidence}return e.timeout.status=e.elapsedMs.val<=r.maxTime,P(),E()||!e.timeout.status?(t.video.pause(),s.result.face[0]):(e.elapsedMs.val=Math.trunc(s.now()-I),new Promise(i=>{setTimeout(async()=>{await R(),i(s.result.face[0])},30)}))}async function z(){var a,n,o,i;if(t.name.value.length>0){let m=(a=t.canvas.getContext("2d"))==null?void 0:a.getImageData(0,0,t.canvas.width,t.canvas.height),p={id:0,name:t.name.value,descriptor:(n=c.face)==null?void 0:n.embedding,image:m};await x(p),d("saved face record:",p.name,"descriptor length:",(i=(o=c.face)==null?void 0:o.embedding)==null?void 0:i.length),d("known face records:",await b())}else d("invalid name")}async function j(){c.record&&c.record.id>0&&await D(c.record)}async function $(){var i,m,p,k;if(t.canvas.style.height="",(i=t.canvas.getContext("2d"))==null||i.clearRect(0,0,r.minSize,r.minSize),!((m=c==null?void 0:c.face)!=null&&m.tensor)||!((p=c==null?void 0:c.face)!=null&&p.embedding))return!1;if(console.log("face record:",c.face),d(`detected face: ${c.face.gender} ${c.face.age||0}y distance 
${100*(c.face.distance||0)}cm/${Math.round(100*(c.face.distance||0)/2.54)}in`),await s.tf.browser.toPixels(c.face.tensor,t.canvas),await b()===0)return d("face database is empty: nothing to compare face with"),document.body.style.background="black",t.delete.style.display="none",!1;let a=await C(),n=a.map(w=>w.descriptor).filter(w=>w.length>0),o=s.match.find(c.face.embedding,n,B);return c.record=a[o.index]||null,c.record&&(d(`best match: ${c.record.name} | id: ${c.record.id} | similarity: ${Math.round(1e3*o.similarity)/10}%`),t.name.value=c.record.name,t.source.style.display="",(k=t.source.getContext("2d"))==null||k.putImageData(c.record.image,0,0)),document.body.style.background=o.similarity>r.threshold?"darkgreen":"maroon",o.similarity>r.threshold}async function M(){var a,n,o,i;return e.faceCount.status=!1,e.faceConfidence.status=!1,e.facingCenter.status=!1,e.blinkDetected.status=!1,e.faceSize.status=!1,e.antispoofCheck.status=!1,e.livenessCheck.status=!1,e.age.status=!1,e.gender.status=!1,e.elapsedMs.val=0,t.match.style.display="none",t.retry.style.display="none",t.source.style.display="none",t.canvas.style.height="50%",document.body.style.background="black",await H(),await T(),I=s.now(),c.face=await R(),t.canvas.width=((n=(a=c.face)==null?void 0:a.tensor)==null?void 0:n.shape[1])||r.minSize,t.canvas.height=((i=(o=c.face)==null?void 0:o.tensor)==null?void 0:i.shape[0])||r.minSize,t.source.width=t.canvas.width,t.source.height=t.canvas.height,t.canvas.style.width="",t.match.style.display="flex",t.save.style.display="flex",t.delete.style.display="flex",t.retry.style.display="block",E()?$():(d("did not find valid face"),!1)}async function q(){var a,n;d("human version:",s.version,"| tfjs version:",s.tf.version["tfjs-core"]),d("options:",JSON.stringify(r).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),d("initializing webcam..."),await H(),d("loading human models..."),await s.load(),d("initializing human..."),d("face embedding model:",g.face.description.enabled?"faceres":"",(a=g.face.mobilefacenet)!=null&&a.enabled?"mobilefacenet":"",(n=g.face.insightface)!=null&&n.enabled?"insightface":""),d("loading face database..."),d("known face records:",await b()),t.retry.addEventListener("click",M),t.save.addEventListener("click",z),t.delete.addEventListener("click",j),await s.warmup(),await M()}window.onload=q; //# sourceMappingURL=index.js.map diff --git a/demo/faceid/index.js.map b/demo/faceid/index.js.map index 219e73ec..e15df82b 100644 --- a/demo/faceid/index.js.map +++ b/demo/faceid/index.js.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["index.ts", "indexdb.ts"], - "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\nimport * as indexDb from './indexdb'; // methods to deal with indexdb\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n cacheSensitivity: 0,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: true }, // lets run with histogram equilizer\n debug: true,\n face: {\n enabled: true,\n detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image\n description: { enabled: true }, // default model for face descriptor extraction is faceres\n // mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model\n // insightface: 
{ enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }, // alternative model\n iris: { enabled: true }, // needed to determine gaze direction\n emotion: { enabled: false }, // not needed\n antispoof: { enabled: true }, // enable optional antispoof module\n liveness: { enabled: true }, // enable optional liveness module\n },\n body: { enabled: false },\n hand: { enabled: false },\n object: { enabled: false },\n gesture: { enabled: true }, // parses face and iris gestures\n};\n\n// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 }; // for embedding model\nconst matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model\n\nconst options = {\n minConfidence: 0.6, // overal face confidence for box, face, gender, real, live\n minSize: 224, // min input to face descriptor model before degradation\n maxTime: 30000, // max time before giving up\n blinkMin: 10, // minimum duration of a valid blink\n blinkMax: 800, // maximum duration of a valid blink\n threshold: 0.5, // minimum similarity\n distanceMin: 0.4, // closest that face is allowed to be to the cammera in cm\n distanceMax: 1.0, // farthest that face is allowed to be to the cammera in cm\n mask: humanConfig.face.detector.mask,\n rotation: humanConfig.face.detector.rotation,\n cropFactor: humanConfig.face.detector.cropFactor,\n ...matchOptions,\n};\n\nconst ok: Record = { // must meet all rules\n faceCount: { status: false, val: 0 },\n faceConfidence: { status: false, val: 0 },\n facingCenter: { status: false, val: 0 },\n lookingCenter: { status: false, val: 0 },\n blinkDetected: { status: false, val: 0 },\n faceSize: { status: false, val: 0 },\n antispoofCheck: { status: false, val: 0 },\n livenessCheck: { status: false, val: 0 },\n distance: { status: false, val: 0 },\n age: { status: false, val: 0 },\n gender: { status: false, val: 0 },\n timeout: { status: true, val: 0 },\n descriptor: { status: false, val: 0 },\n elapsedMs: { status: undefined, val: 0 }, // total time while waiting for valid face\n detectFPS: { status: undefined, val: 0 }, // mark detection fps performance\n drawFPS: { status: undefined, val: 0 }, // mark redraw fps performance\n};\n\nconst allOk = () => ok.faceCount.status\n && ok.faceSize.status\n && ok.blinkDetected.status\n && ok.facingCenter.status\n && ok.lookingCenter.status\n && ok.faceConfidence.status\n && ok.antispoofCheck.status\n && ok.livenessCheck.status\n && ok.distance.status\n && ok.descriptor.status\n && ok.age.status\n && ok.gender.status;\n\nconst current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record\n\nconst blink = { // internal timers for blink start/end/duration\n start: 0,\n end: 0,\n time: 0,\n};\n\n// let db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: 
document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('fps') as HTMLPreElement,\n match: document.getElementById('match') as HTMLDivElement,\n name: document.getElementById('name') as HTMLInputElement,\n save: document.getElementById('save') as HTMLSpanElement,\n delete: document.getElementById('delete') as HTMLSpanElement,\n retry: document.getElementById('retry') as HTMLDivElement,\n source: document.getElementById('source') as HTMLCanvasElement,\n ok: document.getElementById('ok') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks\nlet startTime = 0;\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\n\nasync function webCam() { // initialize webcam\n // @ts-ignore resizeMode is not yet defined in tslib\n const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n void dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n dom.canvas.style.width = '50%';\n dom.canvas.style.height = '50%';\n if (human.env.initial) log('video:', dom.video.videoWidth, dom.video.videoHeight, '|', stream.getVideoTracks()[0].label);\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) void dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (current.face?.tensor) human.tf.dispose(current.face.tensor); // dispose previous tensor\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const now = human.now();\n ok.detectFPS.val = Math.round(10000 / (now - timestamp.detect)) / 10;\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n }\n}\n\nfunction drawValidationTests() {\n let y = 32;\n for (const [key, val] of Object.entries(ok)) {\n let el = document.getElementById(`ok-${key}`);\n if (!el) {\n el = document.createElement('div');\n el.id = `ok-${key}`;\n el.innerText = key;\n el.className = 'ok';\n el.style.top = `${y}px`;\n dom.ok.appendChild(el);\n }\n if (typeof val.status === 'boolean') el.style.backgroundColor = val.status ? 'lightgreen' : 'lightcoral';\n const status = val.status ? 'ok' : 'fail';\n el.innerText = `${key}: ${val.val === 0 ? 
status : val.val}`;\n y += 28;\n }\n}\n\nasync function validationLoop(): Promise { // main screen refresh loop\n const interpolated = human.next(human.result); // smoothen result using last-known results\n human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n const now = human.now();\n ok.drawFPS.val = Math.round(10000 / (now - timestamp.draw)) / 10;\n timestamp.draw = now;\n ok.faceCount.val = human.result.face.length;\n ok.faceCount.status = ok.faceCount.val === 1; // must be exactly detected face\n if (ok.faceCount.status) { // skip the rest if no face\n const gestures: string[] = Object.values(human.result.gesture).map((gesture: H.GestureResult) => gesture.gesture); // flatten all gestures\n if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed\n if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started how long until eyes are back open\n ok.blinkDetected.status = ok.blinkDetected.status || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);\n if (ok.blinkDetected.status && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);\n ok.facingCenter.status = gestures.includes('facing center');\n ok.lookingCenter.status = gestures.includes('looking center'); // must face camera and look at camera\n ok.faceConfidence.val = human.result.face[0].faceScore || human.result.face[0].boxScore || 0;\n ok.faceConfidence.status = ok.faceConfidence.val >= options.minConfidence;\n ok.antispoofCheck.val = human.result.face[0].real || 0;\n ok.antispoofCheck.status = ok.antispoofCheck.val >= options.minConfidence;\n ok.livenessCheck.val = human.result.face[0].live || 0;\n ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence;\n ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]);\n ok.faceSize.status = ok.faceSize.val >= options.minSize;\n ok.distance.val = human.result.face[0].distance || 0;\n ok.distance.status = (ok.distance.val >= options.distanceMin) && (ok.distance.val <= options.distanceMax);\n ok.descriptor.val = human.result.face[0].embedding?.length || 0;\n ok.descriptor.status = ok.descriptor.val > 0;\n ok.age.val = human.result.face[0].age || 0;\n ok.age.status = ok.age.val > 0;\n ok.gender.val = human.result.face[0].genderScore || 0;\n ok.gender.status = ok.gender.val >= options.minConfidence;\n }\n // run again\n ok.timeout.status = ok.elapsedMs.val <= options.maxTime;\n drawValidationTests();\n if (allOk() || !ok.timeout.status) { // all criteria met\n dom.video.pause();\n return human.result.face[0];\n }\n ok.elapsedMs.val = Math.trunc(human.now() - startTime);\n return new Promise((resolve) => {\n setTimeout(async () => {\n await validationLoop(); // run validation loop until conditions are met\n resolve(human.result.face[0]); // recursive promise resolve\n }, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n });\n}\n\nasync function saveRecords() {\n if (dom.name.value.length > 0) {\n const image = dom.canvas.getContext('2d')?.getImageData(0, 0, dom.canvas.width, dom.canvas.height) as ImageData;\n const rec = { id: 0, name: dom.name.value, descriptor: current.face?.embedding as number[], image };\n await indexDb.save(rec);\n log('saved face record:', rec.name, 
'descriptor length:', current.face?.embedding?.length);\n log('known face records:', await indexDb.count());\n } else {\n log('invalid name');\n }\n}\n\nasync function deleteRecord() {\n if (current.record && current.record.id > 0) {\n await indexDb.remove(current.record);\n }\n}\n\nasync function detectFace() {\n dom.canvas.style.height = '';\n dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);\n if (!current?.face?.tensor || !current?.face?.embedding) return false;\n console.log('face record:', current.face); // eslint-disable-line no-console\n log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${100 * (current.face.distance || 0)}cm/${Math.round(100 * (current.face.distance || 0) / 2.54)}in`);\n await human.tf.browser.toPixels(current.face.tensor, dom.canvas);\n if (await indexDb.count() === 0) {\n log('face database is empty: nothing to compare face with');\n document.body.style.background = 'black';\n dom.delete.style.display = 'none';\n return false;\n }\n const db = await indexDb.load();\n const descriptors = db.map((rec) => rec.descriptor).filter((desc) => desc.length > 0);\n const res = human.match(current.face.embedding, descriptors, matchOptions);\n current.record = db[res.index] || null;\n if (current.record) {\n log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`);\n dom.name.value = current.record.name;\n dom.source.style.display = '';\n dom.source.getContext('2d')?.putImageData(current.record.image, 0, 0);\n }\n document.body.style.background = res.similarity > options.threshold ? 'darkgreen' : 'maroon';\n return res.similarity > options.threshold;\n}\n\nasync function main() { // main entry point\n ok.faceCount.status = false;\n ok.faceConfidence.status = false;\n ok.facingCenter.status = false;\n ok.blinkDetected.status = false;\n ok.faceSize.status = false;\n ok.antispoofCheck.status = false;\n ok.livenessCheck.status = false;\n ok.age.status = false;\n ok.gender.status = false;\n ok.elapsedMs.val = 0;\n dom.match.style.display = 'none';\n dom.retry.style.display = 'none';\n dom.source.style.display = 'none';\n dom.canvas.style.height = '50%';\n document.body.style.background = 'black';\n await webCam();\n await detectionLoop(); // start detection loop\n startTime = human.now();\n current.face = await validationLoop(); // start validation loop\n dom.canvas.width = current.face?.tensor?.shape[1] || options.minSize;\n dom.canvas.height = current.face?.tensor?.shape[0] || options.minSize;\n dom.source.width = dom.canvas.width;\n dom.source.height = dom.canvas.height;\n dom.canvas.style.width = '';\n dom.match.style.display = 'flex';\n dom.save.style.display = 'flex';\n dom.delete.style.display = 'flex';\n dom.retry.style.display = 'block';\n if (!allOk()) { // is all criteria met?\n log('did not find valid face');\n return false;\n }\n return detectFace();\n}\n\nasync function init() {\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('options:', JSON.stringify(options).replace(/{|}|\"|\\[|\\]/g, '').replace(/,/g, ' '));\n log('initializing webcam...');\n await webCam(); // start webcam\n log('loading human models...');\n await human.load(); // preload all models\n log('initializing human...');\n log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 
'insightface' : '');\n log('loading face database...');\n log('known face records:', await indexDb.count());\n dom.retry.addEventListener('click', main);\n dom.save.addEventListener('click', saveRecords);\n dom.delete.addEventListener('click', deleteRecord);\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await main();\n}\n\nwindow.onload = init;\n", "let db: IDBDatabase; // instance of indexdb\n\nconst database = 'human';\nconst table = 'person';\n\nexport interface FaceRecord { id: number, name: string, descriptor: number[], image: ImageData }\n\nconst log = (...msg) => console.log('indexdb', ...msg); // eslint-disable-line no-console\n\nexport async function open() {\n if (db) return true;\n return new Promise((resolve) => {\n const request: IDBOpenDBRequest = indexedDB.open(database, 1);\n request.onerror = (evt) => log('error:', evt);\n request.onupgradeneeded = (evt: IDBVersionChangeEvent) => { // create if doesnt exist\n log('create:', evt.target);\n db = (evt.target as IDBOpenDBRequest).result;\n db.createObjectStore(table, { keyPath: 'id', autoIncrement: true });\n };\n request.onsuccess = (evt) => { // open\n db = (evt.target as IDBOpenDBRequest).result;\n log('open:', db);\n resolve(true);\n };\n });\n}\n\nexport async function load(): Promise {\n const faceDB: FaceRecord[] = [];\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const cursor: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).openCursor(null, 'next');\n cursor.onerror = (evt) => log('load error:', evt);\n cursor.onsuccess = (evt) => {\n if ((evt.target as IDBRequest).result) {\n faceDB.push((evt.target as IDBRequest).result.value);\n (evt.target as IDBRequest).result.continue();\n } else {\n resolve(faceDB);\n }\n };\n });\n}\n\nexport async function count(): Promise {\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const store: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).count();\n store.onerror = (evt) => log('count error:', evt);\n store.onsuccess = () => resolve(store.result);\n });\n}\n\nexport async function save(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image }; // omit id as its autoincrement\n db.transaction([table], 'readwrite').objectStore(table).put(newRecord);\n log('save:', newRecord);\n}\n\nexport async function remove(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n db.transaction([table], 'readwrite').objectStore(table).delete(faceRecord.id); // delete based on id\n log('delete:', faceRecord);\n}\n"], - "mappings": 
";;;;;;;;AASA,YAAY,OAAO;;;ACTnB,IAAI;AAEJ,IAAM,WAAW;AACjB,IAAM,QAAQ;AAId,IAAM,MAAM,IAAI,QAAQ,QAAQ,IAAI,WAAW,GAAG,GAAG;AAErD,eAAsB,OAAO;AAC3B,MAAI;AAAI,WAAO;AACf,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,UAA4B,UAAU,KAAK,UAAU,CAAC;AAC5D,YAAQ,UAAU,CAAC,QAAQ,IAAI,UAAU,GAAG;AAC5C,YAAQ,kBAAkB,CAAC,QAA+B;AACxD,UAAI,WAAW,IAAI,MAAM;AACzB,WAAM,IAAI,OAA4B;AACtC,SAAG,kBAAkB,OAAO,EAAE,SAAS,MAAM,eAAe,KAAK,CAAC;AAAA,IACpE;AACA,YAAQ,YAAY,CAAC,QAAQ;AAC3B,WAAM,IAAI,OAA4B;AACtC,UAAI,SAAS,EAAE;AACf,cAAQ,IAAI;AAAA,IACd;AAAA,EACF,CAAC;AACH;AAEA,eAAsB,OAA8B;AAClD,QAAM,SAAuB,CAAC;AAC9B,MAAI,CAAC;AAAI,UAAM,KAAK;AACpB,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,SAAqB,GAAG,YAAY,CAAC,KAAK,GAAG,WAAW,EAAE,YAAY,KAAK,EAAE,WAAW,MAAM,MAAM;AAC1G,WAAO,UAAU,CAAC,QAAQ,IAAI,eAAe,GAAG;AAChD,WAAO,YAAY,CAAC,QAAQ;AAC1B,UAAK,IAAI,OAAsB,QAAQ;AACrC,eAAO,KAAM,IAAI,OAAsB,OAAO,KAAK;AACnD,QAAC,IAAI,OAAsB,OAAO,SAAS;AAAA,MAC7C,OAAO;AACL,gBAAQ,MAAM;AAAA,MAChB;AAAA,IACF;AAAA,EACF,CAAC;AACH;AAEA,eAAsB,QAAyB;AAC7C,MAAI,CAAC;AAAI,UAAM,KAAK;AACpB,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,UAAM,QAAoB,GAAG,YAAY,CAAC,KAAK,GAAG,WAAW,EAAE,YAAY,KAAK,EAAE,MAAM;AACxF,UAAM,UAAU,CAAC,QAAQ,IAAI,gBAAgB,GAAG;AAChD,UAAM,YAAY,MAAM,QAAQ,MAAM,MAAM;AAAA,EAC9C,CAAC;AACH;AAEA,eAAsB,KAAK,YAAwB;AACjD,MAAI,CAAC;AAAI,UAAM,KAAK;AACpB,QAAM,YAAY,EAAE,MAAM,WAAW,MAAM,YAAY,WAAW,YAAY,OAAO,WAAW,MAAM;AACtG,KAAG,YAAY,CAAC,KAAK,GAAG,WAAW,EAAE,YAAY,KAAK,EAAE,IAAI,SAAS;AACrE,MAAI,SAAS,SAAS;AACxB;AAEA,eAAsB,OAAO,YAAwB;AACnD,MAAI,CAAC;AAAI,UAAM,KAAK;AACpB,KAAG,YAAY,CAAC,KAAK,GAAG,WAAW,EAAE,YAAY,KAAK,EAAE,OAAO,WAAW,EAAE;AAC5E,MAAI,WAAW,UAAU;AAC3B;;;ADpDA,IAAM,cAAc;AAAA,EAClB,kBAAkB;AAAA,EAClB,eAAe;AAAA,EACf,QAAQ,EAAE,SAAS,MAAM,cAAc,KAAK;AAAA,EAC5C,OAAO;AAAA,EACP,MAAM;AAAA,IACJ,SAAS;AAAA,IACT,UAAU,EAAE,UAAU,MAAM,QAAQ,MAAM,YAAY,KAAK,MAAM,MAAM;AAAA,IACvE,aAAa,EAAE,SAAS,KAAK;AAAA,IAG7B,MAAM,EAAE,SAAS,KAAK;AAAA,IACtB,SAAS,EAAE,SAAS,MAAM;AAAA,IAC1B,WAAW,EAAE,SAAS,KAAK;AAAA,IAC3B,UAAU,EAAE,SAAS,KAAK;AAAA,EAC5B;AAAA,EACA,MAAM,EAAE,SAAS,MAAM;AAAA,EACvB,MAAM,EAAE,SAAS,MAAM;AAAA,EACvB,QAAQ,EAAE,SAAS,MAAM;AAAA,EACzB,SAAS,EAAE,SAAS,KAAK;AAC3B;AAGA,IAAM,eAAe,EAAE,OAAO,GAAG,YAAY,IAAI,KAAK,KAAK,KAAK,IAAI;AAEpE,IAAM,UAAU;AAAA,EACd,eAAe;AAAA,EACf,SAAS;AAAA,EACT,SAAS;AAAA,EACT,UAAU;AAAA,EACV,UAAU;AAAA,EACV,WAAW;AAAA,EACX,aAAa;AAAA,EACb,aAAa;AAAA,EACb,MAAM,YAAY,KAAK,SAAS;AAAA,EAChC,UAAU,YAAY,KAAK,SAAS;AAAA,EACpC,YAAY,YAAY,KAAK,SAAS;AAAA,EACtC,GAAG;AACL;AAEA,IAAM,KAAmE;AAAA,EACvE,WAAW,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACnC,gBAAgB,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACxC,cAAc,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACtC,eAAe,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACvC,eAAe,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACvC,UAAU,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EAClC,gBAAgB,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACxC,eAAe,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACvC,UAAU,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EAClC,KAAK,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EAC7B,QAAQ,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EAChC,SAAS,EAAE,QAAQ,MAAM,KAAK,EAAE;AAAA,EAChC,YAAY,EAAE,QAAQ,OAAO,KAAK,EAAE;AAAA,EACpC,WAAW,EAAE,QAAQ,QAAW,KAAK,EAAE;AAAA,EACvC,WAAW,EAAE,QAAQ,QAAW,KAAK,EAAE;AAAA,EACvC,SAAS,EAAE,QAAQ,QAAW,KAAK,EAAE;AACvC;AAEA,IAAM,QAAQ,MAAM,GAAG,UAAU,UAC5B,GAAG,SAAS,UACZ,GAAG,cAAc,UACjB,GAAG,aAAa,UAChB,GAAG,cAAc,UACjB,GAAG,eAAe,UAClB,GAAG,eAAe,UAClB,GAAG,cAAc,UACjB,GAAG,SAAS,UACZ,GAAG,WAAW,UACd,GAAG,IAAI,UACP,GAAG,OAAO;AAEf,IAAM,UAA4E,EAAE,MAAM,MAAM,QAAQ,KAAK;AAE7G,IAAM,QAAQ;AAAA,EACZ,OAAO;AAAA,EACP,KAAK;AAAA,EACL,MAAM;AACR;AAGA,IAAM,QAAQ,IAAM,QAAM,WAAW;AAErC,MAAM,IAAI,UAAU;AACpB,MAAM,KAAK,QAAQ,OAAO;AAC1B,MAAM,KAAK,QAAQ,aAAa;AAEhC,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe,OAAO;AAAA,EACtC,QAAQ,SAAS,eAAe,QAAQ;AAAA,EACxC,KAAK,SAAS,eAAe,KAAK;
AAAA,EAClC,KAAK,SAAS,eAAe,KAAK;AAAA,EAClC,OAAO,SAAS,eAAe,OAAO;AAAA,EACtC,MAAM,SAAS,eAAe,MAAM;AAAA,EACpC,MAAM,SAAS,eAAe,MAAM;AAAA,EACpC,QAAQ,SAAS,eAAe,QAAQ;AAAA,EACxC,OAAO,SAAS,eAAe,OAAO;AAAA,EACtC,QAAQ,SAAS,eAAe,QAAQ;AAAA,EACxC,IAAI,SAAS,eAAe,IAAI;AAClC;AACA,IAAM,YAAY,EAAE,QAAQ,GAAG,MAAM,EAAE;AACvC,IAAI,YAAY;AAEhB,IAAMA,OAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,GAAG,IAAI;AACrC,UAAQ,IAAI,GAAG,GAAG;AACpB;AAEA,eAAe,SAAS;AAEtB,QAAM,gBAAwC,EAAE,OAAO,OAAO,OAAO,EAAE,YAAY,QAAQ,YAAY,QAAQ,OAAO,EAAE,OAAO,SAAS,KAAK,YAAY,EAAE,EAAE;AAC7J,QAAM,SAAsB,MAAM,UAAU,aAAa,aAAa,aAAa;AACnF,QAAM,QAAQ,IAAI,QAAQ,CAAC,YAAY;AAAE,QAAI,MAAM,eAAe,MAAM,QAAQ,IAAI;AAAA,EAAG,CAAC;AACxF,MAAI,MAAM,YAAY;AACtB,OAAK,IAAI,MAAM,KAAK;AACpB,QAAM;AACN,MAAI,OAAO,QAAQ,IAAI,MAAM;AAC7B,MAAI,OAAO,SAAS,IAAI,MAAM;AAC9B,MAAI,OAAO,MAAM,QAAQ;AACzB,MAAI,OAAO,MAAM,SAAS;AAC1B,MAAI,MAAM,IAAI;AAAS,IAAAA,KAAI,UAAU,IAAI,MAAM,YAAY,IAAI,MAAM,aAAa,KAAK,OAAO,eAAe,EAAE,GAAG,KAAK;AACvH,MAAI,OAAO,UAAU,MAAM;AACzB,QAAI,IAAI,MAAM;AAAQ,WAAK,IAAI,MAAM,KAAK;AAAA;AACrC,UAAI,MAAM,MAAM;AAAA,EACvB;AACF;AAEA,eAAe,gBAAgB;AA3I/B;AA4IE,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,SAAI,aAAQ,SAAR,mBAAc;AAAQ,YAAM,GAAG,QAAQ,QAAQ,KAAK,MAAM;AAC9D,UAAM,MAAM,OAAO,IAAI,KAAK;AAC5B,UAAM,MAAM,MAAM,IAAI;AACtB,OAAG,UAAU,MAAM,KAAK,MAAM,OAAS,MAAM,UAAU,OAAO,IAAI;AAClE,cAAU,SAAS;AACnB,0BAAsB,aAAa;AAAA,EACrC;AACF;AAEA,SAAS,sBAAsB;AAC7B,MAAI,IAAI;AACR,aAAW,CAAC,KAAK,GAAG,KAAK,OAAO,QAAQ,EAAE,GAAG;AAC3C,QAAI,KAAK,SAAS,eAAe,MAAM,KAAK;AAC5C,QAAI,CAAC,IAAI;AACP,WAAK,SAAS,cAAc,KAAK;AACjC,SAAG,KAAK,MAAM;AACd,SAAG,YAAY;AACf,SAAG,YAAY;AACf,SAAG,MAAM,MAAM,GAAG;AAClB,UAAI,GAAG,YAAY,EAAE;AAAA,IACvB;AACA,QAAI,OAAO,IAAI,WAAW;AAAW,SAAG,MAAM,kBAAkB,IAAI,SAAS,eAAe;AAC5F,UAAM,SAAS,IAAI,SAAS,OAAO;AACnC,OAAG,YAAY,GAAG,QAAQ,IAAI,QAAQ,IAAI,SAAS,IAAI;AACvD,SAAK;AAAA,EACP;AACF;AAEA,eAAe,iBAAwC;AAzKvD;AA0KE,QAAM,eAAe,MAAM,KAAK,MAAM,MAAM;AAC5C,QAAM,KAAK,OAAO,IAAI,OAAO,IAAI,MAAM;AACvC,QAAM,MAAM,KAAK,IAAI,IAAI,QAAQ,YAAY;AAC7C,QAAM,MAAM,MAAM,IAAI;AACtB,KAAG,QAAQ,MAAM,KAAK,MAAM,OAAS,MAAM,UAAU,KAAK,IAAI;AAC9D,YAAU,OAAO;AACjB,KAAG,UAAU,MAAM,MAAM,OAAO,KAAK;AACrC,KAAG,UAAU,SAAS,GAAG,UAAU,QAAQ;AAC3C,MAAI,GAAG,UAAU,QAAQ;AACvB,UAAM,WAAqB,OAAO,OAAO,MAAM,OAAO,OAAO,EAAE,IAAI,CAAC,YAA6B,QAAQ,OAAO;AAChH,QAAI,SAAS,SAAS,gBAAgB,KAAK,SAAS,SAAS,iBAAiB;AAAG,YAAM,QAAQ,MAAM,IAAI;AACzG,QAAI,MAAM,QAAQ,KAAK,CAAC,SAAS,SAAS,gBAAgB,KAAK,CAAC,SAAS,SAAS,iBAAiB;AAAG,YAAM,MAAM,MAAM,IAAI;AAC5H,OAAG,cAAc,SAAS,GAAG,cAAc,UAAW,KAAK,IAAI,MAAM,MAAM,MAAM,KAAK,IAAI,QAAQ,YAAY,KAAK,IAAI,MAAM,MAAM,MAAM,KAAK,IAAI,QAAQ;AAC1J,QAAI,GAAG,cAAc,UAAU,MAAM,SAAS;AAAG,YAAM,OAAO,KAAK,MAAM,MAAM,MAAM,MAAM,KAAK;AAChG,OAAG,aAAa,SAAS,SAAS,SAAS,eAAe;AAC1D,OAAG,cAAc,SAAS,SAAS,SAAS,gBAAgB;AAC5D,OAAG,eAAe,MAAM,MAAM,OAAO,KAAK,GAAG,aAAa,MAAM,OAAO,KAAK,GAAG,YAAY;AAC3F,OAAG,eAAe,SAAS,GAAG,eAAe,OAAO,QAAQ;AAC5D,OAAG,eAAe,MAAM,MAAM,OAAO,KAAK,GAAG,QAAQ;AACrD,OAAG,eAAe,SAAS,GAAG,eAAe,OAAO,QAAQ;AAC5D,OAAG,cAAc,MAAM,MAAM,OAAO,KAAK,GAAG,QAAQ;AACpD,OAAG,cAAc,SAAS,GAAG,cAAc,OAAO,QAAQ;AAC1D,OAAG,SAAS,MAAM,KAAK,IAAI,MAAM,OAAO,KAAK,GAAG,IAAI,IAAI,MAAM,OAAO,KAAK,GAAG,IAAI,EAAE;AACnF,OAAG,SAAS,SAAS,GAAG,SAAS,OAAO,QAAQ;AAChD,OAAG,SAAS,MAAM,MAAM,OAAO,KAAK,GAAG,YAAY;AACnD,OAAG,SAAS,SAAU,GAAG,SAAS,OAAO,QAAQ,eAAiB,GAAG,SAAS,OAAO,QAAQ;AAC7F,OAAG,WAAW,QAAM,WAAM,OAAO,KAAK,GAAG,cAArB,mBAAgC,WAAU;AAC9D,OAAG,WAAW,SAAS,GAAG,WAAW,MAAM;AAC3C,OAAG,IAAI,MAAM,MAAM,OAAO,KAAK,GAAG,OAAO;AACzC,OAAG,IAAI,SAAS,GAAG,IAAI,MAAM;AAC7B,OAAG,OAAO,MAAM,MAAM,OAAO,KAAK,GAAG,eAAe;AACpD,OAAG,OAAO,SAAS,GAAG,OAAO,OAAO,QAAQ;AAAA,EAC9C;AAEA,KAAG,QAAQ,SAAS,GAAG,UAAU,OAAO,QAAQ;AAChD,sBAAoB;AACpB,MAAI,MAAM,KAAK,CAAC,GAAG,QAAQ,QAAQ;AACjC,QAAI,MAAM,MAAM;A
AChB,WAAO,MAAM,OAAO,KAAK;AAAA,EAC3B;AACA,KAAG,UAAU,MAAM,KAAK,MAAM,MAAM,IAAI,IAAI,SAAS;AACrD,SAAO,IAAI,QAAQ,CAAC,YAAY;AAC9B,eAAW,YAAY;AACrB,YAAM,eAAe;AACrB,cAAQ,MAAM,OAAO,KAAK,EAAE;AAAA,IAC9B,GAAG,EAAE;AAAA,EACP,CAAC;AACH;AAEA,eAAe,cAAc;AA3N7B;AA4NE,MAAI,IAAI,KAAK,MAAM,SAAS,GAAG;AAC7B,UAAM,SAAQ,SAAI,OAAO,WAAW,IAAI,MAA1B,mBAA6B,aAAa,GAAG,GAAG,IAAI,OAAO,OAAO,IAAI,OAAO;AAC3F,UAAM,MAAM,EAAE,IAAI,GAAG,MAAM,IAAI,KAAK,OAAO,aAAY,aAAQ,SAAR,mBAAc,WAAuB,MAAM;AAClG,UAAc,KAAK,GAAG;AACtB,IAAAA,KAAI,sBAAsB,IAAI,MAAM,uBAAsB,mBAAQ,SAAR,mBAAc,cAAd,mBAAyB,MAAM;AACzF,IAAAA,KAAI,uBAAuB,MAAc,MAAM,CAAC;AAAA,EAClD,OAAO;AACL,IAAAA,KAAI,cAAc;AAAA,EACpB;AACF;AAEA,eAAe,eAAe;AAC5B,MAAI,QAAQ,UAAU,QAAQ,OAAO,KAAK,GAAG;AAC3C,UAAc,OAAO,QAAQ,MAAM;AAAA,EACrC;AACF;AAEA,eAAe,aAAa;AA7O5B;AA8OE,MAAI,OAAO,MAAM,SAAS;AAC1B,YAAI,OAAO,WAAW,IAAI,MAA1B,mBAA6B,UAAU,GAAG,GAAG,QAAQ,SAAS,QAAQ;AACtE,MAAI,GAAC,wCAAS,SAAT,mBAAe,WAAU,GAAC,wCAAS,SAAT,mBAAe;AAAW,WAAO;AAChE,UAAQ,IAAI,gBAAgB,QAAQ,IAAI;AACxC,EAAAA,KAAI,kBAAkB,QAAQ,KAAK,UAAU,QAAQ,KAAK,OAAO,eAAe,OAAO,QAAQ,KAAK,YAAY,QAAQ,KAAK,MAAM,OAAO,QAAQ,KAAK,YAAY,KAAK,IAAI,KAAK;AACjL,QAAM,MAAM,GAAG,QAAQ,SAAS,QAAQ,KAAK,QAAQ,IAAI,MAAM;AAC/D,MAAI,MAAc,MAAM,MAAM,GAAG;AAC/B,IAAAA,KAAI,sDAAsD;AAC1D,aAAS,KAAK,MAAM,aAAa;AACjC,QAAI,OAAO,MAAM,UAAU;AAC3B,WAAO;AAAA,EACT;AACA,QAAMC,MAAK,MAAc,KAAK;AAC9B,QAAM,cAAcA,IAAG,IAAI,CAAC,QAAQ,IAAI,UAAU,EAAE,OAAO,CAAC,SAAS,KAAK,SAAS,CAAC;AACpF,QAAM,MAAM,MAAM,MAAM,QAAQ,KAAK,WAAW,aAAa,YAAY;AACzE,UAAQ,SAASA,IAAG,IAAI,UAAU;AAClC,MAAI,QAAQ,QAAQ;AAClB,IAAAD,KAAI,eAAe,QAAQ,OAAO,cAAc,QAAQ,OAAO,oBAAoB,KAAK,MAAM,MAAO,IAAI,UAAU,IAAI,KAAK;AAC5H,QAAI,KAAK,QAAQ,QAAQ,OAAO;AAChC,QAAI,OAAO,MAAM,UAAU;AAC3B,cAAI,OAAO,WAAW,IAAI,MAA1B,mBAA6B,aAAa,QAAQ,OAAO,OAAO,GAAG;AAAA,EACrE;AACA,WAAS,KAAK,MAAM,aAAa,IAAI,aAAa,QAAQ,YAAY,cAAc;AACpF,SAAO,IAAI,aAAa,QAAQ;AAClC;AAEA,eAAe,OAAO;AAxQtB;AAyQE,KAAG,UAAU,SAAS;AACtB,KAAG,eAAe,SAAS;AAC3B,KAAG,aAAa,SAAS;AACzB,KAAG,cAAc,SAAS;AAC1B,KAAG,SAAS,SAAS;AACrB,KAAG,eAAe,SAAS;AAC3B,KAAG,cAAc,SAAS;AAC1B,KAAG,IAAI,SAAS;AAChB,KAAG,OAAO,SAAS;AACnB,KAAG,UAAU,MAAM;AACnB,MAAI,MAAM,MAAM,UAAU;AAC1B,MAAI,MAAM,MAAM,UAAU;AAC1B,MAAI,OAAO,MAAM,UAAU;AAC3B,MAAI,OAAO,MAAM,SAAS;AAC1B,WAAS,KAAK,MAAM,aAAa;AACjC,QAAM,OAAO;AACb,QAAM,cAAc;AACpB,cAAY,MAAM,IAAI;AACtB,UAAQ,OAAO,MAAM,eAAe;AACpC,MAAI,OAAO,UAAQ,mBAAQ,SAAR,mBAAc,WAAd,mBAAsB,MAAM,OAAM,QAAQ;AAC7D,MAAI,OAAO,WAAS,mBAAQ,SAAR,mBAAc,WAAd,mBAAsB,MAAM,OAAM,QAAQ;AAC9D,MAAI,OAAO,QAAQ,IAAI,OAAO;AAC9B,MAAI,OAAO,SAAS,IAAI,OAAO;AAC/B,MAAI,OAAO,MAAM,QAAQ;AACzB,MAAI,MAAM,MAAM,UAAU;AAC1B,MAAI,KAAK,MAAM,UAAU;AACzB,MAAI,OAAO,MAAM,UAAU;AAC3B,MAAI,MAAM,MAAM,UAAU;AAC1B,MAAI,CAAC,MAAM,GAAG;AACZ,IAAAA,KAAI,yBAAyB;AAC7B,WAAO;AAAA,EACT;AACA,SAAO,WAAW;AACpB;AAEA,eAAe,OAAO;AA5StB;AA6SE,EAAAA,KAAI,kBAAkB,MAAM,SAAS,mBAAmB,MAAM,GAAG,QAAQ,YAAY;AACrF,EAAAA,KAAI,YAAY,KAAK,UAAU,OAAO,EAAE,QAAQ,gBAAgB,EAAE,EAAE,QAAQ,MAAM,GAAG,CAAC;AACtF,EAAAA,KAAI,wBAAwB;AAC5B,QAAM,OAAO;AACb,EAAAA,KAAI,yBAAyB;AAC7B,QAAM,MAAM,KAAK;AACjB,EAAAA,KAAI,uBAAuB;AAC3B,EAAAA,KAAI,yBAAyB,YAAY,KAAK,YAAY,UAAU,YAAY,MAAI,iBAAY,KAAK,qBAAjB,mBAAmC,WAAU,kBAAkB,MAAI,iBAAY,KAAK,mBAAjB,mBAAiC,WAAU,gBAAgB,EAAE;AACpN,EAAAA,KAAI,0BAA0B;AAC9B,EAAAA,KAAI,uBAAuB,MAAc,MAAM,CAAC;AAChD,MAAI,MAAM,iBAAiB,SAAS,IAAI;AACxC,MAAI,KAAK,iBAAiB,SAAS,WAAW;AAC9C,MAAI,OAAO,iBAAiB,SAAS,YAAY;AACjD,QAAM,MAAM,OAAO;AACnB,QAAM,KAAK;AACb;AAEA,OAAO,SAAS;", - "names": ["log", "db"] + "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\nimport * as indexDb from 
'./indexdb'; // methods to deal with indexdb\n\nconst humanConfig = { // user configuration for human, used to fine-tune behavior\n cacheSensitivity: 0,\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: true }, // lets run with histogram equilizer\n debug: true,\n face: {\n enabled: true,\n detector: { rotation: true, return: true, cropFactor: 1.6, mask: false }, // return tensor is used to get detected face image\n description: { enabled: true }, // default model for face descriptor extraction is faceres\n // mobilefacenet: { enabled: true, modelPath: 'https://vladmandic.github.io/human-models/models/mobilefacenet.json' }, // alternative model\n // insightface: { enabled: true, modelPath: 'https://vladmandic.github.io/insightface/models/insightface-mobilenet-swish.json' }, // alternative model\n iris: { enabled: true }, // needed to determine gaze direction\n emotion: { enabled: false }, // not needed\n antispoof: { enabled: true }, // enable optional antispoof module\n liveness: { enabled: true }, // enable optional liveness module\n },\n body: { enabled: false },\n hand: { enabled: false },\n object: { enabled: false },\n gesture: { enabled: true }, // parses face and iris gestures\n};\n\n// const matchOptions = { order: 2, multiplier: 1000, min: 0.0, max: 1.0 }; // for embedding model\nconst matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 }; // for faceres model\n\nconst options = {\n minConfidence: 0.6, // overal face confidence for box, face, gender, real, live\n minSize: 224, // min input to face descriptor model before degradation\n maxTime: 30000, // max time before giving up\n blinkMin: 10, // minimum duration of a valid blink\n blinkMax: 800, // maximum duration of a valid blink\n threshold: 0.5, // minimum similarity\n distanceMin: 0.4, // closest that face is allowed to be to the cammera in cm\n distanceMax: 1.0, // farthest that face is allowed to be to the cammera in cm\n mask: humanConfig.face.detector.mask,\n rotation: humanConfig.face.detector.rotation,\n cropFactor: humanConfig.face.detector.cropFactor,\n ...matchOptions,\n};\n\nconst ok: Record = { // must meet all rules\n faceCount: { status: false, val: 0 },\n faceConfidence: { status: false, val: 0 },\n facingCenter: { status: false, val: 0 },\n lookingCenter: { status: false, val: 0 },\n blinkDetected: { status: false, val: 0 },\n faceSize: { status: false, val: 0 },\n antispoofCheck: { status: false, val: 0 },\n livenessCheck: { status: false, val: 0 },\n distance: { status: false, val: 0 },\n age: { status: false, val: 0 },\n gender: { status: false, val: 0 },\n timeout: { status: true, val: 0 },\n descriptor: { status: false, val: 0 },\n elapsedMs: { status: undefined, val: 0 }, // total time while waiting for valid face\n detectFPS: { status: undefined, val: 0 }, // mark detection fps performance\n drawFPS: { status: undefined, val: 0 }, // mark redraw fps performance\n};\n\nconst allOk = () => ok.faceCount.status\n && ok.faceSize.status\n && ok.blinkDetected.status\n && ok.facingCenter.status\n && ok.lookingCenter.status\n && ok.faceConfidence.status\n && ok.antispoofCheck.status\n && ok.livenessCheck.status\n && ok.distance.status\n && ok.descriptor.status\n && ok.age.status\n && ok.gender.status;\n\nconst current: { face: H.FaceResult | null, record: indexDb.FaceRecord | null } = { face: null, record: null }; // current face record and matched database record\n\nconst blink = { // internal timers for blink start/end/duration\n start: 0,\n end: 0,\n time: 0,\n};\n\n// let 
db: Array<{ name: string, source: string, embedding: number[] }> = []; // holds loaded face descriptor database\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('fps') as HTMLPreElement,\n match: document.getElementById('match') as HTMLDivElement,\n name: document.getElementById('name') as HTMLInputElement,\n save: document.getElementById('save') as HTMLSpanElement,\n delete: document.getElementById('delete') as HTMLSpanElement,\n retry: document.getElementById('retry') as HTMLDivElement,\n source: document.getElementById('source') as HTMLCanvasElement,\n ok: document.getElementById('ok') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0 }; // holds information used to calculate performance and possible memory leaks\nlet startTime = 0;\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\n\nasync function webCam() { // initialize webcam\n // @ts-ignore resizeMode is not yet defined in tslib\n const cameraOptions: MediaStreamConstraints = { audio: false, video: { facingMode: 'user', resizeMode: 'none', width: { ideal: document.body.clientWidth } } };\n const stream: MediaStream = await navigator.mediaDevices.getUserMedia(cameraOptions);\n const ready = new Promise((resolve) => { dom.video.onloadeddata = () => resolve(true); });\n dom.video.srcObject = stream;\n void dom.video.play();\n await ready;\n dom.canvas.width = dom.video.videoWidth;\n dom.canvas.height = dom.video.videoHeight;\n dom.canvas.style.width = '50%';\n dom.canvas.style.height = '50%';\n if (human.env.initial) log('video:', dom.video.videoWidth, dom.video.videoHeight, '|', stream.getVideoTracks()[0].label);\n dom.canvas.onclick = () => { // pause when clicked on screen and resume on next click\n if (dom.video.paused) void dom.video.play();\n else dom.video.pause();\n };\n}\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (current.face?.tensor) human.tf.dispose(current.face.tensor); // dispose previous tensor\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const now = human.now();\n ok.detectFPS.val = Math.round(10000 / (now - timestamp.detect)) / 10;\n timestamp.detect = now;\n requestAnimationFrame(detectionLoop); // start new frame immediately\n }\n}\n\nfunction drawValidationTests() {\n let y = 32;\n for (const [key, val] of Object.entries(ok)) {\n let el = document.getElementById(`ok-${key}`);\n if (!el) {\n el = document.createElement('div');\n el.id = `ok-${key}`;\n el.innerText = key;\n el.className = 'ok';\n el.style.top = `${y}px`;\n dom.ok.appendChild(el);\n }\n if (typeof val.status === 'boolean') el.style.backgroundColor = val.status ? 'lightgreen' : 'lightcoral';\n const status = val.status ? 'ok' : 'fail';\n el.innerText = `${key}: ${val.val === 0 ? 
status : val.val}`;\n y += 28;\n }\n}\n\nasync function validationLoop(): Promise { // main screen refresh loop\n const interpolated = human.next(human.result); // smoothen result using last-known results\n human.draw.canvas(dom.video, dom.canvas); // draw canvas to screen\n await human.draw.all(dom.canvas, interpolated); // draw labels, boxes, lines, etc.\n const now = human.now();\n ok.drawFPS.val = Math.round(10000 / (now - timestamp.draw)) / 10;\n timestamp.draw = now;\n ok.faceCount.val = human.result.face.length;\n ok.faceCount.status = ok.faceCount.val === 1; // must be exactly detected face\n if (ok.faceCount.status) { // skip the rest if no face\n const gestures: string[] = Object.values(human.result.gesture).map((gesture: H.GestureResult) => gesture.gesture); // flatten all gestures\n if (gestures.includes('blink left eye') || gestures.includes('blink right eye')) blink.start = human.now(); // blink starts when eyes get closed\n if (blink.start > 0 && !gestures.includes('blink left eye') && !gestures.includes('blink right eye')) blink.end = human.now(); // if blink started how long until eyes are back open\n ok.blinkDetected.status = ok.blinkDetected.status || (Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax);\n if (ok.blinkDetected.status && blink.time === 0) blink.time = Math.trunc(blink.end - blink.start);\n ok.facingCenter.status = gestures.includes('facing center');\n ok.lookingCenter.status = gestures.includes('looking center'); // must face camera and look at camera\n ok.faceConfidence.val = human.result.face[0].faceScore || human.result.face[0].boxScore || 0;\n ok.faceConfidence.status = ok.faceConfidence.val >= options.minConfidence;\n ok.antispoofCheck.val = human.result.face[0].real || 0;\n ok.antispoofCheck.status = ok.antispoofCheck.val >= options.minConfidence;\n ok.livenessCheck.val = human.result.face[0].live || 0;\n ok.livenessCheck.status = ok.livenessCheck.val >= options.minConfidence;\n ok.faceSize.val = Math.min(human.result.face[0].box[2], human.result.face[0].box[3]);\n ok.faceSize.status = ok.faceSize.val >= options.minSize;\n ok.distance.val = human.result.face[0].distance || 0;\n ok.distance.status = (ok.distance.val >= options.distanceMin) && (ok.distance.val <= options.distanceMax);\n ok.descriptor.val = human.result.face[0].embedding?.length || 0;\n ok.descriptor.status = ok.descriptor.val > 0;\n ok.age.val = human.result.face[0].age || 0;\n ok.age.status = ok.age.val > 0;\n ok.gender.val = human.result.face[0].genderScore || 0;\n ok.gender.status = ok.gender.val >= options.minConfidence;\n }\n // run again\n ok.timeout.status = ok.elapsedMs.val <= options.maxTime;\n drawValidationTests();\n if (allOk() || !ok.timeout.status) { // all criteria met\n dom.video.pause();\n return human.result.face[0];\n }\n ok.elapsedMs.val = Math.trunc(human.now() - startTime);\n return new Promise((resolve) => {\n setTimeout(async () => {\n await validationLoop(); // run validation loop until conditions are met\n resolve(human.result.face[0]); // recursive promise resolve\n }, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n });\n}\n\nasync function saveRecords() {\n if (dom.name.value.length > 0) {\n const image = dom.canvas.getContext('2d')?.getImageData(0, 0, dom.canvas.width, dom.canvas.height) as ImageData;\n const rec = { id: 0, name: dom.name.value, descriptor: current.face?.embedding as number[], image };\n await indexDb.save(rec);\n log('saved face record:', rec.name, 
'descriptor length:', current.face?.embedding?.length);\n log('known face records:', await indexDb.count());\n } else {\n log('invalid name');\n }\n}\n\nasync function deleteRecord() {\n if (current.record && current.record.id > 0) {\n await indexDb.remove(current.record);\n }\n}\n\nasync function detectFace() {\n dom.canvas.style.height = '';\n dom.canvas.getContext('2d')?.clearRect(0, 0, options.minSize, options.minSize);\n if (!current?.face?.tensor || !current?.face?.embedding) return false;\n console.log('face record:', current.face); // eslint-disable-line no-console\n log(`detected face: ${current.face.gender} ${current.face.age || 0}y distance ${100 * (current.face.distance || 0)}cm/${Math.round(100 * (current.face.distance || 0) / 2.54)}in`);\n await human.tf.browser.toPixels(current.face.tensor, dom.canvas);\n if (await indexDb.count() === 0) {\n log('face database is empty: nothing to compare face with');\n document.body.style.background = 'black';\n dom.delete.style.display = 'none';\n return false;\n }\n const db = await indexDb.load();\n const descriptors = db.map((rec) => rec.descriptor).filter((desc) => desc.length > 0);\n const res = human.match.find(current.face.embedding, descriptors, matchOptions);\n current.record = db[res.index] || null;\n if (current.record) {\n log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`);\n dom.name.value = current.record.name;\n dom.source.style.display = '';\n dom.source.getContext('2d')?.putImageData(current.record.image, 0, 0);\n }\n document.body.style.background = res.similarity > options.threshold ? 'darkgreen' : 'maroon';\n return res.similarity > options.threshold;\n}\n\nasync function main() { // main entry point\n ok.faceCount.status = false;\n ok.faceConfidence.status = false;\n ok.facingCenter.status = false;\n ok.blinkDetected.status = false;\n ok.faceSize.status = false;\n ok.antispoofCheck.status = false;\n ok.livenessCheck.status = false;\n ok.age.status = false;\n ok.gender.status = false;\n ok.elapsedMs.val = 0;\n dom.match.style.display = 'none';\n dom.retry.style.display = 'none';\n dom.source.style.display = 'none';\n dom.canvas.style.height = '50%';\n document.body.style.background = 'black';\n await webCam();\n await detectionLoop(); // start detection loop\n startTime = human.now();\n current.face = await validationLoop(); // start validation loop\n dom.canvas.width = current.face?.tensor?.shape[1] || options.minSize;\n dom.canvas.height = current.face?.tensor?.shape[0] || options.minSize;\n dom.source.width = dom.canvas.width;\n dom.source.height = dom.canvas.height;\n dom.canvas.style.width = '';\n dom.match.style.display = 'flex';\n dom.save.style.display = 'flex';\n dom.delete.style.display = 'flex';\n dom.retry.style.display = 'block';\n if (!allOk()) { // is all criteria met?\n log('did not find valid face');\n return false;\n }\n return detectFace();\n}\n\nasync function init() {\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('options:', JSON.stringify(options).replace(/{|}|\"|\\[|\\]/g, '').replace(/,/g, ' '));\n log('initializing webcam...');\n await webCam(); // start webcam\n log('loading human models...');\n await human.load(); // preload all models\n log('initializing human...');\n log('face embedding model:', humanConfig.face.description.enabled ? 'faceres' : '', humanConfig.face['mobilefacenet']?.enabled ? 'mobilefacenet' : '', humanConfig.face['insightface']?.enabled ? 
'insightface' : '');\n log('loading face database...');\n log('known face records:', await indexDb.count());\n dom.retry.addEventListener('click', main);\n dom.save.addEventListener('click', saveRecords);\n dom.delete.addEventListener('click', deleteRecord);\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await main();\n}\n\nwindow.onload = init;\n", "let db: IDBDatabase; // instance of indexdb\n\nconst database = 'human';\nconst table = 'person';\n\nexport interface FaceRecord { id: number, name: string, descriptor: number[], image: ImageData }\n\nconst log = (...msg) => console.log('indexdb', ...msg); // eslint-disable-line no-console\n\nexport async function open() {\n if (db) return true;\n return new Promise((resolve) => {\n const request: IDBOpenDBRequest = indexedDB.open(database, 1);\n request.onerror = (evt) => log('error:', evt);\n request.onupgradeneeded = (evt: IDBVersionChangeEvent) => { // create if doesnt exist\n log('create:', evt.target);\n db = (evt.target as IDBOpenDBRequest).result;\n db.createObjectStore(table, { keyPath: 'id', autoIncrement: true });\n };\n request.onsuccess = (evt) => { // open\n db = (evt.target as IDBOpenDBRequest).result;\n log('open:', db);\n resolve(true);\n };\n });\n}\n\nexport async function load(): Promise {\n const faceDB: FaceRecord[] = [];\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const cursor: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).openCursor(null, 'next');\n cursor.onerror = (evt) => log('load error:', evt);\n cursor.onsuccess = (evt) => {\n if ((evt.target as IDBRequest).result) {\n faceDB.push((evt.target as IDBRequest).result.value);\n (evt.target as IDBRequest).result.continue();\n } else {\n resolve(faceDB);\n }\n };\n });\n}\n\nexport async function count(): Promise {\n if (!db) await open(); // open or create if not already done\n return new Promise((resolve) => {\n const store: IDBRequest = db.transaction([table], 'readwrite').objectStore(table).count();\n store.onerror = (evt) => log('count error:', evt);\n store.onsuccess = () => resolve(store.result);\n });\n}\n\nexport async function save(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image }; // omit id as its autoincrement\n db.transaction([table], 'readwrite').objectStore(table).put(newRecord);\n log('save:', newRecord);\n}\n\nexport async function remove(faceRecord: FaceRecord) {\n if (!db) await open(); // open or create if not already done\n db.transaction([table], 'readwrite').objectStore(table).delete(faceRecord.id); // delete based on id\n log('delete:', faceRecord);\n}\n"], + "mappings": 
";;;;;;AASA,UAAYA,MAAO,0BCTnB,IAAIC,EAEEC,EAAW,QACXC,EAAQ,SAIRC,EAAM,IAAIC,IAAQ,QAAQ,IAAI,UAAW,GAAGA,CAAG,EAErD,eAAsBC,GAAO,CAC3B,OAAIL,EAAW,GACR,IAAI,QAASM,GAAY,CAC9B,IAAMC,EAA4B,UAAU,KAAKN,EAAU,CAAC,EAC5DM,EAAQ,QAAWC,GAAQL,EAAI,SAAUK,CAAG,EAC5CD,EAAQ,gBAAmBC,GAA+B,CACxDL,EAAI,UAAWK,EAAI,MAAM,EACzBR,EAAMQ,EAAI,OAA4B,OACtCR,EAAG,kBAAkBE,EAAO,CAAE,QAAS,KAAM,cAAe,EAAK,CAAC,CACpE,EACAK,EAAQ,UAAaC,GAAQ,CAC3BR,EAAMQ,EAAI,OAA4B,OACtCL,EAAI,QAASH,CAAE,EACfM,EAAQ,EAAI,CACd,CACF,CAAC,CACH,CAEA,eAAsBG,GAA8B,CAClD,IAAMC,EAAuB,CAAC,EAC9B,OAAKV,GAAI,MAAMK,EAAK,EACb,IAAI,QAASC,GAAY,CAC9B,IAAMK,EAAqBX,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,WAAW,KAAM,MAAM,EAC1GS,EAAO,QAAWH,GAAQL,EAAI,cAAeK,CAAG,EAChDG,EAAO,UAAaH,GAAQ,CACrBA,EAAI,OAAsB,QAC7BE,EAAO,KAAMF,EAAI,OAAsB,OAAO,KAAK,EAClDA,EAAI,OAAsB,OAAO,SAAS,GAE3CF,EAAQI,CAAM,CAElB,CACF,CAAC,CACH,CAEA,eAAsBE,GAAyB,CAC7C,OAAKZ,GAAI,MAAMK,EAAK,EACb,IAAI,QAASC,GAAY,CAC9B,IAAMO,EAAoBb,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,MAAM,EACxFW,EAAM,QAAWL,GAAQL,EAAI,eAAgBK,CAAG,EAChDK,EAAM,UAAY,IAAMP,EAAQO,EAAM,MAAM,CAC9C,CAAC,CACH,CAEA,eAAsBC,EAAKC,EAAwB,CAC5Cf,GAAI,MAAMK,EAAK,EACpB,IAAMW,EAAY,CAAE,KAAMD,EAAW,KAAM,WAAYA,EAAW,WAAY,MAAOA,EAAW,KAAM,EACtGf,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,IAAIc,CAAS,EACrEb,EAAI,QAASa,CAAS,CACxB,CAEA,eAAsBC,EAAOF,EAAwB,CAC9Cf,GAAI,MAAMK,EAAK,EACpBL,EAAG,YAAY,CAACE,CAAK,EAAG,WAAW,EAAE,YAAYA,CAAK,EAAE,OAAOa,EAAW,EAAE,EAC5EZ,EAAI,UAAWY,CAAU,CAC3B,CDpDA,IAAMG,EAAc,CAClB,iBAAkB,EAClB,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,EAAK,EAC5C,MAAO,GACP,KAAM,CACJ,QAAS,GACT,SAAU,CAAE,SAAU,GAAM,OAAQ,GAAM,WAAY,IAAK,KAAM,EAAM,EACvE,YAAa,CAAE,QAAS,EAAK,EAG7B,KAAM,CAAE,QAAS,EAAK,EACtB,QAAS,CAAE,QAAS,EAAM,EAC1B,UAAW,CAAE,QAAS,EAAK,EAC3B,SAAU,CAAE,QAAS,EAAK,CAC5B,EACA,KAAM,CAAE,QAAS,EAAM,EACvB,KAAM,CAAE,QAAS,EAAM,EACvB,OAAQ,CAAE,QAAS,EAAM,EACzB,QAAS,CAAE,QAAS,EAAK,CAC3B,EAGMC,EAAe,CAAE,MAAO,EAAG,WAAY,GAAI,IAAK,GAAK,IAAK,EAAI,EAE9DC,EAAU,CACd,cAAe,GACf,QAAS,IACT,QAAS,IACT,SAAU,GACV,SAAU,IACV,UAAW,GACX,YAAa,GACb,YAAa,EACb,KAAMF,EAAY,KAAK,SAAS,KAChC,SAAUA,EAAY,KAAK,SAAS,SACpC,WAAYA,EAAY,KAAK,SAAS,WACtC,GAAGC,CACL,EAEME,EAAmE,CACvE,UAAW,CAAE,OAAQ,GAAO,IAAK,CAAE,EACnC,eAAgB,CAAE,OAAQ,GAAO,IAAK,CAAE,EACxC,aAAc,CAAE,OAAQ,GAAO,IAAK,CAAE,EACtC,cAAe,CAAE,OAAQ,GAAO,IAAK,CAAE,EACvC,cAAe,CAAE,OAAQ,GAAO,IAAK,CAAE,EACvC,SAAU,CAAE,OAAQ,GAAO,IAAK,CAAE,EAClC,eAAgB,CAAE,OAAQ,GAAO,IAAK,CAAE,EACxC,cAAe,CAAE,OAAQ,GAAO,IAAK,CAAE,EACvC,SAAU,CAAE,OAAQ,GAAO,IAAK,CAAE,EAClC,IAAK,CAAE,OAAQ,GAAO,IAAK,CAAE,EAC7B,OAAQ,CAAE,OAAQ,GAAO,IAAK,CAAE,EAChC,QAAS,CAAE,OAAQ,GAAM,IAAK,CAAE,EAChC,WAAY,CAAE,OAAQ,GAAO,IAAK,CAAE,EACpC,UAAW,CAAE,OAAQ,OAAW,IAAK,CAAE,EACvC,UAAW,CAAE,OAAQ,OAAW,IAAK,CAAE,EACvC,QAAS,CAAE,OAAQ,OAAW,IAAK,CAAE,CACvC,EAEMC,EAAQ,IAAMD,EAAG,UAAU,QAC5BA,EAAG,SAAS,QACZA,EAAG,cAAc,QACjBA,EAAG,aAAa,QAChBA,EAAG,cAAc,QACjBA,EAAG,eAAe,QAClBA,EAAG,eAAe,QAClBA,EAAG,cAAc,QACjBA,EAAG,SAAS,QACZA,EAAG,WAAW,QACdA,EAAG,IAAI,QACPA,EAAG,OAAO,OAETE,EAA4E,CAAE,KAAM,KAAM,OAAQ,IAAK,EAEvGC,EAAQ,CACZ,MAAO,EACP,IAAK,EACL,KAAM,CACR,EAGMC,EAAQ,IAAM,QAAMP,CAAW,EAErCO,EAAM,IAAI,QAAU,GACpBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAEhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,KAAK,EAClC,MAAO,SAAS,eAAe,OAAO,EACtC,KAAM,SAAS,eAAe,MAAM,EACpC,KAAM,SAAS,eAAe,MAAM,EACpC,OAAQ,SAAS,eAAe,QAAQ,EACxC,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,GAAI,SAAS,eAAe,IAAI,CAClC,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,CAAE,EACnCC,EAAY,EAEVC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EACrC,QAAQ,IAAI,GAAGA,CAAG,CA
CpB,EAEA,eAAeC,GAAS,CAEtB,IAAMC,EAAwC,CAAE,MAAO,GAAO,MAAO,CAAE,WAAY,OAAQ,WAAY,OAAQ,MAAO,CAAE,MAAO,SAAS,KAAK,WAAY,CAAE,CAAE,EACvJC,EAAsB,MAAM,UAAU,aAAa,aAAaD,CAAa,EAC7EE,EAAQ,IAAI,QAASC,GAAY,CAAET,EAAI,MAAM,aAAe,IAAMS,EAAQ,EAAI,CAAG,CAAC,EACxFT,EAAI,MAAM,UAAYO,EACjBP,EAAI,MAAM,KAAK,EACpB,MAAMQ,EACNR,EAAI,OAAO,MAAQA,EAAI,MAAM,WAC7BA,EAAI,OAAO,OAASA,EAAI,MAAM,YAC9BA,EAAI,OAAO,MAAM,MAAQ,MACzBA,EAAI,OAAO,MAAM,OAAS,MACtBD,EAAM,IAAI,SAASI,EAAI,SAAUH,EAAI,MAAM,WAAYA,EAAI,MAAM,YAAa,IAAKO,EAAO,eAAe,EAAE,GAAG,KAAK,EACvHP,EAAI,OAAO,QAAU,IAAM,CACrBA,EAAI,MAAM,OAAaA,EAAI,MAAM,KAAK,EACrCA,EAAI,MAAM,MAAM,CACvB,CACF,CAEA,eAAeU,GAAgB,CA3I/B,IAAAC,EA4IE,GAAI,CAACX,EAAI,MAAM,OAAQ,EACjBW,EAAAd,EAAQ,OAAR,MAAAc,EAAc,QAAQZ,EAAM,GAAG,QAAQF,EAAQ,KAAK,MAAM,EAC9D,MAAME,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMY,EAAMb,EAAM,IAAI,EACtBJ,EAAG,UAAU,IAAM,KAAK,MAAM,KAASiB,EAAMX,EAAU,OAAO,EAAI,GAClEA,EAAU,OAASW,EACnB,sBAAsBF,CAAa,CACrC,CACF,CAEA,SAASG,GAAsB,CAC7B,IAAIC,EAAI,GACR,OAAW,CAACC,EAAKC,CAAG,IAAK,OAAO,QAAQrB,CAAE,EAAG,CAC3C,IAAIsB,EAAK,SAAS,eAAe,MAAMF,GAAK,EACvCE,IACHA,EAAK,SAAS,cAAc,KAAK,EACjCA,EAAG,GAAK,MAAMF,IACdE,EAAG,UAAYF,EACfE,EAAG,UAAY,KACfA,EAAG,MAAM,IAAM,GAAGH,MAClBd,EAAI,GAAG,YAAYiB,CAAE,GAEnB,OAAOD,EAAI,QAAW,YAAWC,EAAG,MAAM,gBAAkBD,EAAI,OAAS,aAAe,cAC5F,IAAME,EAASF,EAAI,OAAS,KAAO,OACnCC,EAAG,UAAY,GAAGF,MAAQC,EAAI,MAAQ,EAAIE,EAASF,EAAI,MACvDF,GAAK,EACP,CACF,CAEA,eAAeK,GAAwC,CAzKvD,IAAAR,EA0KE,IAAMS,EAAerB,EAAM,KAAKA,EAAM,MAAM,EAC5CA,EAAM,KAAK,OAAOC,EAAI,MAAOA,EAAI,MAAM,EACvC,MAAMD,EAAM,KAAK,IAAIC,EAAI,OAAQoB,CAAY,EAC7C,IAAMR,EAAMb,EAAM,IAAI,EAKtB,GAJAJ,EAAG,QAAQ,IAAM,KAAK,MAAM,KAASiB,EAAMX,EAAU,KAAK,EAAI,GAC9DA,EAAU,KAAOW,EACjBjB,EAAG,UAAU,IAAMI,EAAM,OAAO,KAAK,OACrCJ,EAAG,UAAU,OAASA,EAAG,UAAU,MAAQ,EACvCA,EAAG,UAAU,OAAQ,CACvB,IAAM0B,EAAqB,OAAO,OAAOtB,EAAM,OAAO,OAAO,EAAE,IAAKuB,GAA6BA,EAAQ,OAAO,GAC5GD,EAAS,SAAS,gBAAgB,GAAKA,EAAS,SAAS,iBAAiB,KAAGvB,EAAM,MAAQC,EAAM,IAAI,GACrGD,EAAM,MAAQ,GAAK,CAACuB,EAAS,SAAS,gBAAgB,GAAK,CAACA,EAAS,SAAS,iBAAiB,IAAGvB,EAAM,IAAMC,EAAM,IAAI,GAC5HJ,EAAG,cAAc,OAASA,EAAG,cAAc,QAAW,KAAK,IAAIG,EAAM,IAAMA,EAAM,KAAK,EAAIJ,EAAQ,UAAY,KAAK,IAAII,EAAM,IAAMA,EAAM,KAAK,EAAIJ,EAAQ,SACtJC,EAAG,cAAc,QAAUG,EAAM,OAAS,IAAGA,EAAM,KAAO,KAAK,MAAMA,EAAM,IAAMA,EAAM,KAAK,GAChGH,EAAG,aAAa,OAAS0B,EAAS,SAAS,eAAe,EAC1D1B,EAAG,cAAc,OAAS0B,EAAS,SAAS,gBAAgB,EAC5D1B,EAAG,eAAe,IAAMI,EAAM,OAAO,KAAK,GAAG,WAAaA,EAAM,OAAO,KAAK,GAAG,UAAY,EAC3FJ,EAAG,eAAe,OAASA,EAAG,eAAe,KAAOD,EAAQ,cAC5DC,EAAG,eAAe,IAAMI,EAAM,OAAO,KAAK,GAAG,MAAQ,EACrDJ,EAAG,eAAe,OAASA,EAAG,eAAe,KAAOD,EAAQ,cAC5DC,EAAG,cAAc,IAAMI,EAAM,OAAO,KAAK,GAAG,MAAQ,EACpDJ,EAAG,cAAc,OAASA,EAAG,cAAc,KAAOD,EAAQ,cAC1DC,EAAG,SAAS,IAAM,KAAK,IAAII,EAAM,OAAO,KAAK,GAAG,IAAI,GAAIA,EAAM,OAAO,KAAK,GAAG,IAAI,EAAE,EACnFJ,EAAG,SAAS,OAASA,EAAG,SAAS,KAAOD,EAAQ,QAChDC,EAAG,SAAS,IAAMI,EAAM,OAAO,KAAK,GAAG,UAAY,EACnDJ,EAAG,SAAS,OAAUA,EAAG,SAAS,KAAOD,EAAQ,aAAiBC,EAAG,SAAS,KAAOD,EAAQ,YAC7FC,EAAG,WAAW,MAAMgB,EAAAZ,EAAM,OAAO,KAAK,GAAG,YAArB,YAAAY,EAAgC,SAAU,EAC9DhB,EAAG,WAAW,OAASA,EAAG,WAAW,IAAM,EAC3CA,EAAG,IAAI,IAAMI,EAAM,OAAO,KAAK,GAAG,KAAO,EACzCJ,EAAG,IAAI,OAASA,EAAG,IAAI,IAAM,EAC7BA,EAAG,OAAO,IAAMI,EAAM,OAAO,KAAK,GAAG,aAAe,EACpDJ,EAAG,OAAO,OAASA,EAAG,OAAO,KAAOD,EAAQ,aAC9C,CAIA,OAFAC,EAAG,QAAQ,OAASA,EAAG,UAAU,KAAOD,EAAQ,QAChDmB,EAAoB,EAChBjB,EAAM,GAAK,CAACD,EAAG,QAAQ,QACzBK,EAAI,MAAM,MAAM,EACTD,EAAM,OAAO,KAAK,KAE3BJ,EAAG,UAAU,IAAM,KAAK,MAAMI,EAAM,IAAI,EAAIG,CAAS,EAC9C,IAAI,QAASO,GAAY,CAC9B,WAAW,SAAY,CACrB,MAAMU,EAAe,EACrBV,EAAQV,EAAM,OAAO,KAAK,EAAE,CAC9B,EAAG,EAAE,CACP,CAAC,EACH,CAEA,eAAewB,GAAc,CA3N7B,IAAAZ,EAAAa,EAAAC,EAAAC,EA4NE,GAAI1B,EAAI,KAAK,MAAM,OAAS,EAAG,CAC7B,IAAM2B,GAAQhB,EAAAX,EAAI,OAAO
,WAAW,IAAI,IAA1B,YAAAW,EAA6B,aAAa,EAAG,EAAGX,EAAI,OAAO,MAAOA,EAAI,OAAO,QACrF4B,EAAM,CAAE,GAAI,EAAG,KAAM5B,EAAI,KAAK,MAAO,YAAYwB,EAAA3B,EAAQ,OAAR,YAAA2B,EAAc,UAAuB,MAAAG,CAAM,EAClG,MAAcE,EAAKD,CAAG,EACtBzB,EAAI,qBAAsByB,EAAI,KAAM,sBAAsBF,GAAAD,EAAA5B,EAAQ,OAAR,YAAA4B,EAAc,YAAd,YAAAC,EAAyB,MAAM,EACzFvB,EAAI,sBAAuB,MAAc2B,EAAM,CAAC,CAClD,MACE3B,EAAI,cAAc,CAEtB,CAEA,eAAe4B,GAAe,CACxBlC,EAAQ,QAAUA,EAAQ,OAAO,GAAK,GACxC,MAAcmC,EAAOnC,EAAQ,MAAM,CAEvC,CAEA,eAAeoC,GAAa,CA7O5B,IAAAtB,EAAAa,EAAAC,EAAAC,EAgPE,GAFA1B,EAAI,OAAO,MAAM,OAAS,IAC1BW,EAAAX,EAAI,OAAO,WAAW,IAAI,IAA1B,MAAAW,EAA6B,UAAU,EAAG,EAAGjB,EAAQ,QAASA,EAAQ,SAClE,GAAC8B,EAAA3B,GAAA,YAAAA,EAAS,OAAT,MAAA2B,EAAe,SAAU,GAACC,EAAA5B,GAAA,YAAAA,EAAS,OAAT,MAAA4B,EAAe,WAAW,MAAO,GAIhE,GAHA,QAAQ,IAAI,eAAgB5B,EAAQ,IAAI,EACxCM,EAAI,kBAAkBN,EAAQ,KAAK,UAAUA,EAAQ,KAAK,KAAO,eAAe,KAAOA,EAAQ,KAAK,UAAY,QAAQ,KAAK,MAAM,KAAOA,EAAQ,KAAK,UAAY,GAAK,IAAI,KAAK,EACjL,MAAME,EAAM,GAAG,QAAQ,SAASF,EAAQ,KAAK,OAAQG,EAAI,MAAM,EAC3D,MAAc8B,EAAM,IAAM,EAC5B,OAAA3B,EAAI,sDAAsD,EAC1D,SAAS,KAAK,MAAM,WAAa,QACjCH,EAAI,OAAO,MAAM,QAAU,OACpB,GAET,IAAMkC,EAAK,MAAcC,EAAK,EACxBC,EAAcF,EAAG,IAAKN,GAAQA,EAAI,UAAU,EAAE,OAAQS,GAASA,EAAK,OAAS,CAAC,EAC9EC,EAAMvC,EAAM,MAAM,KAAKF,EAAQ,KAAK,UAAWuC,EAAa3C,CAAY,EAC9E,OAAAI,EAAQ,OAASqC,EAAGI,EAAI,QAAU,KAC9BzC,EAAQ,SACVM,EAAI,eAAeN,EAAQ,OAAO,cAAcA,EAAQ,OAAO,oBAAoB,KAAK,MAAM,IAAOyC,EAAI,UAAU,EAAI,KAAK,EAC5HtC,EAAI,KAAK,MAAQH,EAAQ,OAAO,KAChCG,EAAI,OAAO,MAAM,QAAU,IAC3B0B,EAAA1B,EAAI,OAAO,WAAW,IAAI,IAA1B,MAAA0B,EAA6B,aAAa7B,EAAQ,OAAO,MAAO,EAAG,IAErE,SAAS,KAAK,MAAM,WAAayC,EAAI,WAAa5C,EAAQ,UAAY,YAAc,SAC7E4C,EAAI,WAAa5C,EAAQ,SAClC,CAEA,eAAe6C,GAAO,CAxQtB,IAAA5B,EAAAa,EAAAC,EAAAC,EAqSE,OA5BA/B,EAAG,UAAU,OAAS,GACtBA,EAAG,eAAe,OAAS,GAC3BA,EAAG,aAAa,OAAS,GACzBA,EAAG,cAAc,OAAS,GAC1BA,EAAG,SAAS,OAAS,GACrBA,EAAG,eAAe,OAAS,GAC3BA,EAAG,cAAc,OAAS,GAC1BA,EAAG,IAAI,OAAS,GAChBA,EAAG,OAAO,OAAS,GACnBA,EAAG,UAAU,IAAM,EACnBK,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,OAAO,MAAM,QAAU,OAC3BA,EAAI,OAAO,MAAM,OAAS,MAC1B,SAAS,KAAK,MAAM,WAAa,QACjC,MAAMK,EAAO,EACb,MAAMK,EAAc,EACpBR,EAAYH,EAAM,IAAI,EACtBF,EAAQ,KAAO,MAAMsB,EAAe,EACpCnB,EAAI,OAAO,QAAQwB,GAAAb,EAAAd,EAAQ,OAAR,YAAAc,EAAc,SAAd,YAAAa,EAAsB,MAAM,KAAM9B,EAAQ,QAC7DM,EAAI,OAAO,SAAS0B,GAAAD,EAAA5B,EAAQ,OAAR,YAAA4B,EAAc,SAAd,YAAAC,EAAsB,MAAM,KAAMhC,EAAQ,QAC9DM,EAAI,OAAO,MAAQA,EAAI,OAAO,MAC9BA,EAAI,OAAO,OAASA,EAAI,OAAO,OAC/BA,EAAI,OAAO,MAAM,MAAQ,GACzBA,EAAI,MAAM,MAAM,QAAU,OAC1BA,EAAI,KAAK,MAAM,QAAU,OACzBA,EAAI,OAAO,MAAM,QAAU,OAC3BA,EAAI,MAAM,MAAM,QAAU,QACrBJ,EAAM,EAIJqC,EAAW,GAHhB9B,EAAI,yBAAyB,EACtB,GAGX,CAEA,eAAeqC,GAAO,CA5StB,IAAA7B,EAAAa,EA6SErB,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,WAAY,KAAK,UAAUT,CAAO,EAAE,QAAQ,eAAgB,EAAE,EAAE,QAAQ,KAAM,GAAG,CAAC,EACtFS,EAAI,wBAAwB,EAC5B,MAAME,EAAO,EACbF,EAAI,yBAAyB,EAC7B,MAAMJ,EAAM,KAAK,EACjBI,EAAI,uBAAuB,EAC3BA,EAAI,wBAAyBX,EAAY,KAAK,YAAY,QAAU,UAAY,IAAImB,EAAAnB,EAAY,KAAK,gBAAjB,MAAAmB,EAAmC,QAAU,gBAAkB,IAAIa,EAAAhC,EAAY,KAAK,cAAjB,MAAAgC,EAAiC,QAAU,cAAgB,EAAE,EACpNrB,EAAI,0BAA0B,EAC9BA,EAAI,sBAAuB,MAAc2B,EAAM,CAAC,EAChD9B,EAAI,MAAM,iBAAiB,QAASuC,CAAI,EACxCvC,EAAI,KAAK,iBAAiB,QAASuB,CAAW,EAC9CvB,EAAI,OAAO,iBAAiB,QAAS+B,CAAY,EACjD,MAAMhC,EAAM,OAAO,EACnB,MAAMwC,EAAK,CACb,CAEA,OAAO,OAASC", + "names": ["H", "db", "database", "table", "log", "msg", "open", "resolve", "request", "evt", "load", "faceDB", "cursor", "count", "store", "save", "faceRecord", "newRecord", "remove", "humanConfig", "matchOptions", "options", "ok", "allOk", "current", "blink", "human", "dom", "timestamp", "startTime", "log", "msg", "webCam", "cameraOptions", "stream", "ready", 
"resolve", "detectionLoop", "_a", "now", "drawValidationTests", "y", "key", "val", "el", "status", "validationLoop", "interpolated", "gestures", "gesture", "saveRecords", "_b", "_c", "_d", "image", "rec", "save", "count", "deleteRecord", "remove", "detectFace", "db", "load", "descriptors", "desc", "res", "main", "init"] } diff --git a/demo/faceid/index.ts b/demo/faceid/index.ts index fab341ee..522e3a9a 100644 --- a/demo/faceid/index.ts +++ b/demo/faceid/index.ts @@ -250,7 +250,7 @@ async function detectFace() { } const db = await indexDb.load(); const descriptors = db.map((rec) => rec.descriptor).filter((desc) => desc.length > 0); - const res = human.match(current.face.embedding, descriptors, matchOptions); + const res = human.match.find(current.face.embedding, descriptors, matchOptions); current.record = db[res.index] || null; if (current.record) { log(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1000 * res.similarity) / 10}%`); diff --git a/demo/facematch/README.md b/demo/facematch/README.md index 28add287..e61bc9ff 100644 --- a/demo/facematch/README.md +++ b/demo/facematch/README.md @@ -11,7 +11,7 @@ ## Browser Face Recognition Demo - `demo/facematch`: Demo for Browsers that uses all face description and embedding features to -detect, extract and identify all faces plus calculate simmilarity between them +detect, extract and identify all faces plus calculate similarity between them It highlights functionality such as: diff --git a/demo/facematch/facematch.js b/demo/facematch/facematch.js index ceac0522..b38d317a 100644 --- a/demo/facematch/facematch.js +++ b/demo/facematch/facematch.js @@ -1,7 +1,7 @@ /** * Human demo for browsers * - * Demo for face descriptor analysis and face simmilarity analysis + * Demo for face descriptor analysis and face similarity analysis */ /** @type {Human} */ @@ -70,6 +70,9 @@ async function SelectFaceCanvas(face) { document.getElementById('orig').style.filter = 'blur(16px)'; if (face.tensor) { title('Sorting Faces by Similarity'); + const c = document.getElementById('orig'); + await human.tf.browser.toPixels(face.tensor, c); + /* const enhanced = human.enhance(face); if (enhanced) { const c = document.getElementById('orig'); @@ -81,8 +84,9 @@ async function SelectFaceCanvas(face) { ctx.font = 'small-caps 0.4rem "Lato"'; ctx.fillStyle = 'rgba(255, 255, 255, 1)'; } + */ const arr = db.map((rec) => rec.embedding); - const res = await human.match(face.embedding, arr); + const res = await human.match.find(face.embedding, arr); log('Match:', db[res.index].name); const emotion = face.emotion[0] ? 
`${Math.round(100 * face.emotion[0].score)}% ${face.emotion[0].emotion}` : 'N/A'; document.getElementById('desc').innerHTML = ` @@ -103,7 +107,7 @@ async function SelectFaceCanvas(face) { for (const canvas of canvases) { // calculate similarity from selected face to current one in the loop const current = all[canvas.tag.sample][canvas.tag.face]; - const similarity = human.similarity(face.embedding, current.embedding); + const similarity = human.match.similarity(face.embedding, current.embedding); canvas.tag.similarity = similarity; // get best match // draw the canvas @@ -120,7 +124,7 @@ async function SelectFaceCanvas(face) { ctx.font = 'small-caps 1rem "Lato"'; const start = human.now(); const arr = db.map((rec) => rec.embedding); - const res = await human.match(current.embedding, arr); + const res = await human.match.find(current.embedding, arr); time += (human.now() - start); if (res.similarity > minScore) ctx.fillText(`DB: ${(100 * res.similarity).toFixed(1)}% ${db[res.index].name}`, 4, canvas.height - 30); } @@ -161,7 +165,7 @@ async function AddFaceCanvas(index, res, fileName) { ctx.fillStyle = 'rgba(255, 255, 255, 1)'; ctx.fillText(`${res.face[i].age}y ${(100 * (res.face[i].genderScore || 0)).toFixed(1)}% ${res.face[i].gender}`, 4, canvas.height - 6); const arr = db.map((rec) => rec.embedding); - const result = human.match(res.face[i].embedding, arr); + const result = human.match.find(res.face[i].embedding, arr); ctx.font = 'small-caps 1rem "Lato"'; if (result.similarity && res.similarity > minScore) ctx.fillText(`${(100 * result.similarity).toFixed(1)}% ${db[result.index].name}`, 4, canvas.height - 30); document.getElementById('faces').appendChild(canvas); @@ -256,7 +260,7 @@ async function main() { title(''); log('Ready'); human.validate(userConfig); - human.similarity([], []); + human.match.similarity([], []); } window.onload = main; diff --git a/demo/index.js b/demo/index.js index a497b01a..65fbdd66 100644 --- a/demo/index.js +++ b/demo/index.js @@ -222,21 +222,13 @@ async function calcSimmilarity(result) { compare.original = result; log('setting face compare baseline:', result.face[0]); if (result.face[0].tensor) { - const enhanced = human.enhance(result.face[0]); - if (enhanced) { - const c = document.getElementById('orig'); - const squeeze = human.tf.squeeze(enhanced); - const norm = human.tf.div(squeeze, 255); - human.tf.browser.toPixels(norm, c); - human.tf.dispose(enhanced); - human.tf.dispose(squeeze); - human.tf.dispose(norm); - } + const c = document.getElementById('orig'); + human.tf.browser.toPixels(result.face[0].tensor, c); } else { document.getElementById('compare-canvas').getContext('2d').drawImage(compare.original.canvas, 0, 0, 200, 200); } } - const similarity = human.similarity(compare.original.face[0].embedding, result.face[0].embedding); + const similarity = human.match.similarity(compare.original.face[0].embedding, result.face[0].embedding); document.getElementById('similarity').innerText = `similarity: ${Math.trunc(1000 * similarity) / 10}%`; } diff --git a/demo/nodejs/README.md b/demo/nodejs/README.md index 0ebd988d..e5ef81f7 100644 --- a/demo/nodejs/README.md +++ b/demo/nodejs/README.md @@ -82,7 +82,7 @@ node demo/nodejs/node.js detector: { modelPath: 'handdetect.json' }, skeleton: { modelPath: 'handskeleton.json' } }, - object: { enabled: true, modelPath: 'mb3-centernet.json', minConfidence: 0.2, iouThreshold: 0.4, maxDetected: 10, skipFrames: 19 } + object: { enabled: true, modelPath: 'centernet.json', minConfidence: 0.2, iouThreshold: 0.4, 
maxDetected: 10, skipFrames: 19 } } 08:52:15.673 Human: version: 2.0.0 08:52:15.674 Human: tfjs version: 3.6.0 @@ -96,7 +96,7 @@ node demo/nodejs/node.js 08:52:15.847 Human: load model: file://models/handdetect.json 08:52:15.847 Human: load model: file://models/handskeleton.json 08:52:15.914 Human: load model: file://models/movenet-lightning.json -08:52:15.957 Human: load model: file://models/mb3-centernet.json +08:52:15.957 Human: load model: file://models/centernet.json 08:52:16.015 Human: load model: file://models/faceres.json 08:52:16.015 Human: tf engine state: 50796152 bytes 1318 tensors 2021-06-01 08:52:16 INFO: Loaded: [ 'face', 'movenet', 'handpose', 'emotion', 'centernet', 'faceres', [length]: 6 ] diff --git a/demo/nodejs/node-similarity.js b/demo/nodejs/node-similarity.js index 84cdddcc..e9dbdf3d 100644 --- a/demo/nodejs/node-similarity.js +++ b/demo/nodejs/node-similarity.js @@ -57,7 +57,7 @@ async function main() { if (!res1 || !res1.face || res1.face.length === 0 || !res2 || !res2.face || res2.face.length === 0) { throw new Error('Could not detect face descriptors'); } - const similarity = human.similarity(res1.face[0].embedding, res2.face[0].embedding, { order: 2 }); + const similarity = human.match.similarity(res1.face[0].embedding, res2.face[0].embedding, { order: 2 }); log.data('Similarity: ', similarity); } diff --git a/demo/segmentation/index.js b/demo/segmentation/index.js index 1be1e907..7b9ba172 100644 --- a/demo/segmentation/index.js +++ b/demo/segmentation/index.js @@ -52,7 +52,7 @@ async function main() { log('platform:', human.env.platform, '| agent:', human.env.agent); await human.load(); // preload all models log('backend:', human.tf.getBackend(), '| available:', human.env.backends); - log('models stats:', human.getModelStats()); + log('models stats:', human.models.stats()); log('models loaded:', Object.values(human.models).filter((model) => model !== null).length); await human.warmup(); // warmup function to initialize backend for future faster detection const numTensors = human.tf.engine().state.numTensors; diff --git a/demo/typescript/index.js b/demo/typescript/index.js index 3962bf3c..0d696947 100644 --- a/demo/typescript/index.js +++ b/demo/typescript/index.js @@ -4,100 +4,6 @@ author: ' */ - -// demo/typescript/index.ts -import * as H from "../../dist/human.esm.js"; -var width = 1920; -var humanConfig = { - modelBasePath: "../../models", - filter: { enabled: true, equalization: false, flip: false, width }, - face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } }, - body: { enabled: true }, - hand: { enabled: false }, - object: { enabled: false }, - segmentation: { enabled: false }, - gesture: { enabled: true } -}; -var human = new H.Human(humanConfig); -human.env.perfadd = false; -human.draw.options.font = 'small-caps 18px "Lato"'; -human.draw.options.lineHeight = 20; -var dom = { - video: document.getElementById("video"), - canvas: document.getElementById("canvas"), - log: document.getElementById("log"), - fps: document.getElementById("status"), - perf: document.getElementById("performance") -}; -var timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; -var fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; -var log = (...msg) => { - dom.log.innerText += msg.join(" ") + "\n"; - console.log(...msg); -}; -var status = (msg) => dom.fps.innerText = 
msg; -var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors.toString() + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | "); -async function detectionLoop() { - if (!dom.video.paused) { - if (timestamp.start === 0) - timestamp.start = human.now(); - await human.detect(dom.video); - const tensors = human.tf.memory().numTensors; - if (tensors - timestamp.tensors !== 0) - log("allocated tensors:", tensors - timestamp.tensors); - timestamp.tensors = tensors; - fps.detectFPS = Math.round(1e3 * 1e3 / (human.now() - timestamp.detect)) / 1e3; - fps.frames++; - fps.averageMs = Math.round(1e3 * (human.now() - timestamp.start) / fps.frames) / 1e3; - if (fps.frames % 100 === 0 && !dom.video.paused) - log("performance", { ...fps, tensors: timestamp.tensors }); - } - timestamp.detect = human.now(); - requestAnimationFrame(detectionLoop); -} -async function drawLoop() { - var _a, _b, _c; - if (!dom.video.paused) { - const interpolated = human.next(human.result); - const processed = await human.image(dom.video); - human.draw.canvas(processed.canvas, dom.canvas); - const opt = { bodyLabels: `person confidence [score] and ${(_c = (_b = (_a = human.result) == null ? void 0 : _a.body) == null ? void 0 : _b[0]) == null ? void 0 : _c.keypoints.length} keypoints` }; - await human.draw.all(dom.canvas, interpolated, opt); - perf(interpolated.performance); - } - const now = human.now(); - fps.drawFPS = Math.round(1e3 * 1e3 / (now - timestamp.draw)) / 1e3; - timestamp.draw = now; - status(dom.video.paused ? "paused" : `fps: ${fps.detectFPS.toFixed(1).padStart(5, " ")} detect | ${fps.drawFPS.toFixed(1).padStart(5, " ")} draw`); - setTimeout(drawLoop, 30); -} -async function webCam() { - const devices = await human.webcam.enumerate(); - const id = devices[0].deviceId; - await human.webcam.start({ element: dom.video, crop: true, width, id }); - dom.canvas.width = human.webcam.width; - dom.canvas.height = human.webcam.height; - dom.canvas.onclick = async () => { - if (human.webcam.paused) - await human.webcam.play(); - else - human.webcam.pause(); - }; -} -async function main() { - log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]); - log("platform:", human.env.platform, "| agent:", human.env.agent); - status("loading..."); - await human.load(); - log("backend:", human.tf.getBackend(), "| available:", human.env.backends); - log("models stats:", human.getModelStats()); - log("models loaded:", Object.values(human.models).filter((model) => model !== null).length); - log("environment", human.env); - status("initializing..."); - await human.warmup(); - await webCam(); - await detectionLoop(); - await drawLoop(); -} -window.onload = main; +import*as m from"../../dist/human.esm.js";var f=1920,b={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1,width:f},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!1},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;var 
a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+` +`,console.log(...t)},r=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function u(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(u)}async function p(){var d,i,c;if(!a.video.paused){let l=e.next(e.result),w=await e.image(a.video);e.draw.canvas(w.canvas,a.canvas);let v={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,v),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(p,30)}async function h(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!0,width:f,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",Object.values(e.models).filter(t=>t!==null).length),o("environment",e.env),r("initializing..."),await e.warmup(),await h(),await u(),await p()}window.onload=y; //# sourceMappingURL=index.js.map diff --git a/demo/typescript/index.js.map b/demo/typescript/index.js.map index d036388f..e5987da1 100644 --- a/demo/typescript/index.js.map +++ b/demo/typescript/index.js.map @@ -1,7 +1,7 @@ { "version": 3, "sources": ["index.ts"], - "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst width = 1920; // used by webcam config as well as human maximum resultion // can be anything, but resolutions higher than 4k will disable internal optimizations\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'webgpu',\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false, width },\n face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },\n body: { enabled: true },\n // hand: { enabled: true },\n hand: { enabled: false },\n object: { enabled: false },\n segmentation: { enabled: false },\n gesture: { enabled: true 
},\n};\n\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\nconst status = (msg) => dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors.toString() + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = human.next(human.result); // smoothen result using last-known results\n const processed = await human.image(dom.video); // get current video frame, but enhanced with human.filters\n human.draw.canvas(processed.canvas as HTMLCanvasElement, dom.canvas);\n\n const opt: Partial = { bodyLabels: `person confidence [score] and ${human.result?.body?.[0]?.keypoints.length} keypoints` };\n await human.draw.all(dom.canvas, interpolated, opt); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function webCam() {\n const devices = await human.webcam.enumerate();\n const id = devices[0].deviceId; // use first available video source\n await human.webcam.start({ element: dom.video, crop: true, width, id }); // use human webcam helper methods and associate webcam stream with a dom element\n dom.canvas.width = human.webcam.width;\n dom.canvas.height = human.webcam.height;\n dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click\n if (human.webcam.paused) await human.webcam.play();\n else human.webcam.pause();\n };\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.getModelStats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n log('environment', human.env);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], - "mappings": ";;;;;;;;AASA,YAAY,OAAO;AAEnB,IAAM,QAAQ;AAEd,IAAM,cAAiC;AAAA,EAErC,eAAe;AAAA,EACf,QAAQ,EAAE,SAAS,MAAM,cAAc,OAAO,MAAM,OAAO,MAAM;AAAA,EACjE,MAAM,EAAE,SAAS,MAAM,UAAU,EAAE,UAAU,KAAK,GAAG,MAAM,EAAE,SAAS,KAAK,GAAG,WAAW,EAAE,SAAS,MAAM,GAAG,MAAM,EAAE,SAAS,KAAK,GAAG,aAAa,EAAE,SAAS,KAAK,GAAG,SAAS,EAAE,SAAS,KAAK,GAAG,WAAW,EAAE,SAAS,KAAK,GAAG,UAAU,EAAE,SAAS,KAAK,EAAE;AAAA,EAC5P,MAAM,EAAE,SAAS,KAAK;AAAA,EAEtB,MAAM,EAAE,SAAS,MAAM;AAAA,EACvB,QAAQ,EAAE,SAAS,MAAM;AAAA,EACzB,cAAc,EAAE,SAAS,MAAM;AAAA,EAC/B,SAAS,EAAE,SAAS,KAAK;AAC3B;AAEA,IAAM,QAAQ,IAAM,QAAM,WAAW;AAErC,MAAM,IAAI,UAAU;AACpB,MAAM,KAAK,QAAQ,OAAO;AAC1B,MAAM,KAAK,QAAQ,aAAa;AAGhC,IAAM,MAAM;AAAA,EACV,OAAO,SAAS,eAAe,OAAO;AAAA,EACtC,QAAQ,SAAS,eAAe,QAAQ;AAAA,EACxC,KAAK,SAAS,eAAe,KAAK;AAAA,EAClC,KAAK,SAAS,eAAe,QAAQ;AAAA,EACrC,MAAM,SAAS,eAAe,aAAa;AAC7C;AACA,IAAM,YAAY,EAAE,QAAQ,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,EAAE;AAC7D,IAAM,MAAM,EAAE,WAAW,GAAG,SAAS,GAAG,QAAQ,GAAG,WAAW,EAAE;AAEhE,IAAM,MAAM,IAAI,QAAQ;AACtB,MAAI,IAAI,aAAa,IAAI,KAAK,GAAG,IAAI;AACrC,UAAQ,IAAI,GAAG,GAAG;AACpB;AACA,IAAM,SAAS,CAAC,QAAQ,IAAI,IAAI,YAAY;AAC5C,IAAM,OAAO,CAAC,QAAQ,IAAI,KAAK,YAAY,aAAa,MAAM,GAAG,OAAO,EAAE,WAAW,SAAS,IAAI,qBAAqB,KAAK,UAAU,GAAG,EAAE,QAAQ,UAAU,EAAE,EAAE,QAAQ,MAAM,KAAK;AAEpL,eAAe,gBAAgB;AAC7B,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,QAAI,UAAU,UAAU;AAAG,gBAAU,QAAQ,MAAM,IAAI;AAEvD,UAAM,MAAM,OAAO,IAAI,KAAK;AAC5B,UAAM,UAAU,MAAM,GAAG,OAAO,EAAE;AAClC,QAAI,UAAU,UAAU,YAAY;AAAG,UAAI,sBAAsB,UAAU,UAAU,OAAO;AAC5F,cAAU,UAAU;AACpB,QAAI,YAAY,KAAK,MAAM,MAAO,OAAQ,MAAM,IAAI,IAAI,UAAU,OAAO,IAAI;AAC7E,QAAI;AACJ,QAAI,YAAY,KAAK,MAAM,OAAQ,MAAM,IAAI,IAAI,UAAU,SAAS,IAAI,MAAM,IAAI;AAClF,QAAI,IAAI,SAAS,QAAQ,KAAK,CAAC,IAAI,MAAM;AAAQ,UAAI,eAAe,EAAE,GAAG,KAAK,SAAS,UAAU,QAAQ,CAAC;AAAA,EAC5G;AACA,YAAU,SAAS,MAAM,IAAI;AAC7B,wBAAsB,aAAa;AACrC;AAEA,eAAe,WAAW;AAnE1B;AAoEE,MAAI,CAAC,IAAI,MAAM,QAAQ;AACrB,UAAM,eAAe,MAAM,KAAK,MAAM,MAAM;AAC5C,UAAM,YAAY,MAAM,MAAM,MAAM,IAAI,KAAK;AAC7C,UAAM,KAAK,OAAO,UAAU,QAA6B,IAAI,MAAM;AAEnE,UAAM
,MAA8B,EAAE,YAAY,kCAAiC,uBAAM,WAAN,mBAAc,SAAd,mBAAqB,OAArB,mBAAyB,UAAU,mBAAmB;AACzI,UAAM,MAAM,KAAK,IAAI,IAAI,QAAQ,cAAc,GAAG;AAClD,SAAK,aAAa,WAAW;AAAA,EAC/B;AACA,QAAM,MAAM,MAAM,IAAI;AACtB,MAAI,UAAU,KAAK,MAAM,MAAO,OAAQ,MAAM,UAAU,KAAK,IAAI;AACjE,YAAU,OAAO;AACjB,SAAO,IAAI,MAAM,SAAS,WAAW,QAAQ,IAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,cAAc,IAAI,QAAQ,QAAQ,CAAC,EAAE,SAAS,GAAG,GAAG,QAAQ;AACjJ,aAAW,UAAU,EAAE;AACzB;AAEA,eAAe,SAAS;AACtB,QAAM,UAAU,MAAM,MAAM,OAAO,UAAU;AAC7C,QAAM,KAAK,QAAQ,GAAG;AACtB,QAAM,MAAM,OAAO,MAAM,EAAE,SAAS,IAAI,OAAO,MAAM,MAAM,OAAO,GAAG,CAAC;AACtE,MAAI,OAAO,QAAQ,MAAM,OAAO;AAChC,MAAI,OAAO,SAAS,MAAM,OAAO;AACjC,MAAI,OAAO,UAAU,YAAY;AAC/B,QAAI,MAAM,OAAO;AAAQ,YAAM,MAAM,OAAO,KAAK;AAAA;AAC5C,YAAM,OAAO,MAAM;AAAA,EAC1B;AACF;AAEA,eAAe,OAAO;AACpB,MAAI,kBAAkB,MAAM,SAAS,mBAAmB,MAAM,GAAG,QAAQ,YAAY;AACrF,MAAI,aAAa,MAAM,IAAI,UAAU,YAAY,MAAM,IAAI,KAAK;AAChE,SAAO,YAAY;AACnB,QAAM,MAAM,KAAK;AACjB,MAAI,YAAY,MAAM,GAAG,WAAW,GAAG,gBAAgB,MAAM,IAAI,QAAQ;AACzE,MAAI,iBAAiB,MAAM,cAAc,CAAC;AAC1C,MAAI,kBAAkB,OAAO,OAAO,MAAM,MAAM,EAAE,OAAO,CAAC,UAAU,UAAU,IAAI,EAAE,MAAM;AAC1F,MAAI,eAAe,MAAM,GAAG;AAC5B,SAAO,iBAAiB;AACxB,QAAM,MAAM,OAAO;AACnB,QAAM,OAAO;AACb,QAAM,cAAc;AACpB,QAAM,SAAS;AACjB;AAEA,OAAO,SAAS;", - "names": [] + "sourcesContent": ["/**\n * Human demo for browsers\n * @default Human Library\n * @summary \n * @author \n * @copyright \n * @license MIT\n */\n\nimport * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human\n\nconst width = 1920; // used by webcam config as well as human maximum resultion // can be anything, but resolutions higher than 4k will disable internal optimizations\n\nconst humanConfig: Partial = { // user configuration for human, used to fine-tune behavior\n // backend: 'webgpu',\n modelBasePath: '../../models',\n filter: { enabled: true, equalization: false, flip: false, width },\n face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },\n body: { enabled: true },\n // hand: { enabled: true },\n hand: { enabled: false },\n object: { enabled: false },\n segmentation: { enabled: false },\n gesture: { enabled: true },\n};\n\nconst human = new H.Human(humanConfig); // create instance of human with overrides from user configuration\n\nhuman.env.perfadd = false; // is performance data showing instant or total values\nhuman.draw.options.font = 'small-caps 18px \"Lato\"'; // set font used to draw labels when using draw methods\nhuman.draw.options.lineHeight = 20;\n// human.draw.options.fillPolygons = true;\n\nconst dom = { // grab instances of dom objects so we dont have to look them up later\n video: document.getElementById('video') as HTMLVideoElement,\n canvas: document.getElementById('canvas') as HTMLCanvasElement,\n log: document.getElementById('log') as HTMLPreElement,\n fps: document.getElementById('status') as HTMLPreElement,\n perf: document.getElementById('performance') as HTMLDivElement,\n};\nconst timestamp = { detect: 0, draw: 0, tensors: 0, start: 0 }; // holds information used to calculate performance and possible memory leaks\nconst fps = { detectFPS: 0, drawFPS: 0, frames: 0, averageMs: 0 }; // holds calculated fps information for both detect and screen refresh\n\nconst log = (...msg) => { // helper method to output messages\n dom.log.innerText += msg.join(' ') + '\\n';\n console.log(...msg); // eslint-disable-line no-console\n};\nconst status = (msg) => 
dom.fps.innerText = msg; // print status element\nconst perf = (msg) => dom.perf.innerText = 'tensors:' + human.tf.memory().numTensors.toString() + ' | performance: ' + JSON.stringify(msg).replace(/\"|{|}/g, '').replace(/,/g, ' | '); // print performance element\n\nasync function detectionLoop() { // main detection loop\n if (!dom.video.paused) {\n if (timestamp.start === 0) timestamp.start = human.now();\n // log('profiling data:', await human.profile(dom.video));\n await human.detect(dom.video); // actual detection; were not capturing output in a local variable as it can also be reached via human.result\n const tensors = human.tf.memory().numTensors; // check current tensor usage for memory leaks\n if (tensors - timestamp.tensors !== 0) log('allocated tensors:', tensors - timestamp.tensors); // printed on start and each time there is a tensor leak\n timestamp.tensors = tensors;\n fps.detectFPS = Math.round(1000 * 1000 / (human.now() - timestamp.detect)) / 1000;\n fps.frames++;\n fps.averageMs = Math.round(1000 * (human.now() - timestamp.start) / fps.frames) / 1000;\n if (fps.frames % 100 === 0 && !dom.video.paused) log('performance', { ...fps, tensors: timestamp.tensors });\n }\n timestamp.detect = human.now();\n requestAnimationFrame(detectionLoop); // start new frame immediately\n}\n\nasync function drawLoop() { // main screen refresh loop\n if (!dom.video.paused) {\n const interpolated = human.next(human.result); // smoothen result using last-known results\n const processed = await human.image(dom.video); // get current video frame, but enhanced with human.filters\n human.draw.canvas(processed.canvas as HTMLCanvasElement, dom.canvas);\n\n const opt: Partial = { bodyLabels: `person confidence [score] and ${human.result?.body?.[0]?.keypoints.length} keypoints` };\n await human.draw.all(dom.canvas, interpolated, opt); // draw labels, boxes, lines, etc.\n perf(interpolated.performance); // write performance data\n }\n const now = human.now();\n fps.drawFPS = Math.round(1000 * 1000 / (now - timestamp.draw)) / 1000;\n timestamp.draw = now;\n status(dom.video.paused ? 
'paused' : `fps: ${fps.detectFPS.toFixed(1).padStart(5, ' ')} detect | ${fps.drawFPS.toFixed(1).padStart(5, ' ')} draw`); // write status\n setTimeout(drawLoop, 30); // use to slow down refresh from max refresh rate to target of 30 fps\n}\n\nasync function webCam() {\n const devices = await human.webcam.enumerate();\n const id = devices[0].deviceId; // use first available video source\n await human.webcam.start({ element: dom.video, crop: true, width, id }); // use human webcam helper methods and associate webcam stream with a dom element\n dom.canvas.width = human.webcam.width;\n dom.canvas.height = human.webcam.height;\n dom.canvas.onclick = async () => { // pause when clicked on screen and resume on next click\n if (human.webcam.paused) await human.webcam.play();\n else human.webcam.pause();\n };\n}\n\nasync function main() { // main entry point\n log('human version:', human.version, '| tfjs version:', human.tf.version['tfjs-core']);\n log('platform:', human.env.platform, '| agent:', human.env.agent);\n status('loading...');\n await human.load(); // preload all models\n log('backend:', human.tf.getBackend(), '| available:', human.env.backends);\n log('models stats:', human.models.stats());\n log('models loaded:', Object.values(human.models).filter((model) => model !== null).length);\n log('environment', human.env);\n status('initializing...');\n await human.warmup(); // warmup function to initialize backend for future faster detection\n await webCam(); // start webcam\n await detectionLoop(); // start detection loop\n await drawLoop(); // start draw loop\n}\n\nwindow.onload = main;\n"], + "mappings": ";;;;;;AASA,UAAYA,MAAO,0BAEnB,IAAMC,EAAQ,KAERC,EAAiC,CAErC,cAAe,eACf,OAAQ,CAAE,QAAS,GAAM,aAAc,GAAO,KAAM,GAAO,MAAAD,CAAM,EACjE,KAAM,CAAE,QAAS,GAAM,SAAU,CAAE,SAAU,EAAK,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAM,EAAG,KAAM,CAAE,QAAS,EAAK,EAAG,YAAa,CAAE,QAAS,EAAK,EAAG,QAAS,CAAE,QAAS,EAAK,EAAG,UAAW,CAAE,QAAS,EAAK,EAAG,SAAU,CAAE,QAAS,EAAK,CAAE,EAC5P,KAAM,CAAE,QAAS,EAAK,EAEtB,KAAM,CAAE,QAAS,EAAM,EACvB,OAAQ,CAAE,QAAS,EAAM,EACzB,aAAc,CAAE,QAAS,EAAM,EAC/B,QAAS,CAAE,QAAS,EAAK,CAC3B,EAEME,EAAQ,IAAM,QAAMD,CAAW,EAErCC,EAAM,IAAI,QAAU,GACpBA,EAAM,KAAK,QAAQ,KAAO,yBAC1BA,EAAM,KAAK,QAAQ,WAAa,GAGhC,IAAMC,EAAM,CACV,MAAO,SAAS,eAAe,OAAO,EACtC,OAAQ,SAAS,eAAe,QAAQ,EACxC,IAAK,SAAS,eAAe,KAAK,EAClC,IAAK,SAAS,eAAe,QAAQ,EACrC,KAAM,SAAS,eAAe,aAAa,CAC7C,EACMC,EAAY,CAAE,OAAQ,EAAG,KAAM,EAAG,QAAS,EAAG,MAAO,CAAE,EACvDC,EAAM,CAAE,UAAW,EAAG,QAAS,EAAG,OAAQ,EAAG,UAAW,CAAE,EAE1DC,EAAM,IAAIC,IAAQ,CACtBJ,EAAI,IAAI,WAAaI,EAAI,KAAK,GAAG,EAAI;AAAA,EACrC,QAAQ,IAAI,GAAGA,CAAG,CACpB,EACMC,EAAUD,GAAQJ,EAAI,IAAI,UAAYI,EACtCE,EAAQF,GAAQJ,EAAI,KAAK,UAAY,WAAaD,EAAM,GAAG,OAAO,EAAE,WAAW,SAAS,EAAI,mBAAqB,KAAK,UAAUK,CAAG,EAAE,QAAQ,SAAU,EAAE,EAAE,QAAQ,KAAM,KAAK,EAEpL,eAAeG,GAAgB,CAC7B,GAAI,CAACP,EAAI,MAAM,OAAQ,CACjBC,EAAU,QAAU,IAAGA,EAAU,MAAQF,EAAM,IAAI,GAEvD,MAAMA,EAAM,OAAOC,EAAI,KAAK,EAC5B,IAAMQ,EAAUT,EAAM,GAAG,OAAO,EAAE,WAC9BS,EAAUP,EAAU,UAAY,GAAGE,EAAI,qBAAsBK,EAAUP,EAAU,OAAO,EAC5FA,EAAU,QAAUO,EACpBN,EAAI,UAAY,KAAK,MAAM,IAAO,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAAO,EAAI,IAC7EC,EAAI,SACJA,EAAI,UAAY,KAAK,MAAM,KAAQH,EAAM,IAAI,EAAIE,EAAU,OAASC,EAAI,MAAM,EAAI,IAC9EA,EAAI,OAAS,MAAQ,GAAK,CAACF,EAAI,MAAM,QAAQG,EAAI,cAAe,CAAE,GAAGD,EAAK,QAASD,EAAU,OAAQ,CAAC,CAC5G,CACAA,EAAU,OAASF,EAAM,IAAI,EAC7B,sBAAsBQ,CAAa,CACrC,CAEA,eAAeE,GAAW,CAnE1B,IAAAC,EAAAC,EAAAC,EAoEE,GAAI,CAACZ,EAAI,MAAM,OAAQ,CACrB,IAAMa,EAAed,EAAM,KAAKA,EAAM,MAAM,EACtCe,EAAY,MAAMf,EAAM,MAAMC,EAAI,KAAK,EAC7CD,EAAM,KAAK,OAAOe,EAAU,OAA6Bd,EAAI,MAAM,EAEnE,IAAMe,EAA8B,CAAE,WAAY,kCAAiCH,GAAAD,GAAAD,EAA
AX,EAAM,SAAN,YAAAW,EAAc,OAAd,YAAAC,EAAqB,KAArB,YAAAC,EAAyB,UAAU,kBAAmB,EACzI,MAAMb,EAAM,KAAK,IAAIC,EAAI,OAAQa,EAAcE,CAAG,EAClDT,EAAKO,EAAa,WAAW,CAC/B,CACA,IAAMG,EAAMjB,EAAM,IAAI,EACtBG,EAAI,QAAU,KAAK,MAAM,IAAO,KAAQc,EAAMf,EAAU,KAAK,EAAI,IACjEA,EAAU,KAAOe,EACjBX,EAAOL,EAAI,MAAM,OAAS,SAAW,QAAQE,EAAI,UAAU,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,cAAcA,EAAI,QAAQ,QAAQ,CAAC,EAAE,SAAS,EAAG,GAAG,QAAQ,EACjJ,WAAWO,EAAU,EAAE,CACzB,CAEA,eAAeQ,GAAS,CAEtB,IAAMC,GADU,MAAMnB,EAAM,OAAO,UAAU,GAC1B,GAAG,SACtB,MAAMA,EAAM,OAAO,MAAM,CAAE,QAASC,EAAI,MAAO,KAAM,GAAM,MAAAH,EAAO,GAAAqB,CAAG,CAAC,EACtElB,EAAI,OAAO,MAAQD,EAAM,OAAO,MAChCC,EAAI,OAAO,OAASD,EAAM,OAAO,OACjCC,EAAI,OAAO,QAAU,SAAY,CAC3BD,EAAM,OAAO,OAAQ,MAAMA,EAAM,OAAO,KAAK,EAC5CA,EAAM,OAAO,MAAM,CAC1B,CACF,CAEA,eAAeoB,GAAO,CACpBhB,EAAI,iBAAkBJ,EAAM,QAAS,kBAAmBA,EAAM,GAAG,QAAQ,YAAY,EACrFI,EAAI,YAAaJ,EAAM,IAAI,SAAU,WAAYA,EAAM,IAAI,KAAK,EAChEM,EAAO,YAAY,EACnB,MAAMN,EAAM,KAAK,EACjBI,EAAI,WAAYJ,EAAM,GAAG,WAAW,EAAG,eAAgBA,EAAM,IAAI,QAAQ,EACzEI,EAAI,gBAAiBJ,EAAM,OAAO,MAAM,CAAC,EACzCI,EAAI,iBAAkB,OAAO,OAAOJ,EAAM,MAAM,EAAE,OAAQqB,GAAUA,IAAU,IAAI,EAAE,MAAM,EAC1FjB,EAAI,cAAeJ,EAAM,GAAG,EAC5BM,EAAO,iBAAiB,EACxB,MAAMN,EAAM,OAAO,EACnB,MAAMkB,EAAO,EACb,MAAMV,EAAc,EACpB,MAAME,EAAS,CACjB,CAEA,OAAO,OAASU", + "names": ["H", "width", "humanConfig", "human", "dom", "timestamp", "fps", "log", "msg", "status", "perf", "detectionLoop", "tensors", "drawLoop", "_a", "_b", "_c", "interpolated", "processed", "opt", "now", "webCam", "id", "main", "model"] } diff --git a/demo/typescript/index.ts b/demo/typescript/index.ts index ebf5a3f9..ed53b09b 100644 --- a/demo/typescript/index.ts +++ b/demo/typescript/index.ts @@ -100,7 +100,7 @@ async function main() { // main entry point status('loading...'); await human.load(); // preload all models log('backend:', human.tf.getBackend(), '| available:', human.env.backends); - log('models stats:', human.getModelStats()); + log('models stats:', human.models.stats()); log('models loaded:', Object.values(human.models).filter((model) => model !== null).length); log('environment', human.env); status('initializing...'); diff --git a/models/centernet.bin b/models/centernet.bin new file mode 100644 index 00000000..74771713 Binary files /dev/null and b/models/centernet.bin differ diff --git a/models/centernet.json b/models/centernet.json new file mode 100644 index 00000000..31c0b934 --- /dev/null +++ b/models/centernet.json @@ -0,0 +1,577 @@ +{ + "format": "graph-model", + "generatedBy": "https://github.com/610265158/mobilenetv3_centernet", + "convertedBy": "https://github.com/vladmandic", + "signature": + { + "inputs": + { + "tower_0/images": {"name":"tower_0/images","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"512"},{"size":"512"},{"size":"3"}]}} + }, + "outputs": + { + "tower_0/wh": {"name":"tower_0/wh","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"128"},{"size":"128"},{"size":"4"}]}}, + "tower_0/keypoints": {"name":"tower_0/keypoints","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"128"},{"size":"128"},{"size":"80"}]}}, + "tower_0/detections": {"name":"tower_0/detections","dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"100"},{"size":"6"}]}} + } + }, + "modelTopology": + { + "node": + [ + {"name":"tower_0/mul_3/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/strided_slice_9/stack","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + 
{"name":"tower_0/strided_slice_9/stack_1","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_9/stack_2","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/mul_4/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/strided_slice_10/stack","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_10/stack_1","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_10/stack_2","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + {"name":"tower_0/ExpandDims/dim","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/mul_5/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/strided_slice_11/stack","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + {"name":"tower_0/strided_slice_11/stack_1","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + {"name":"tower_0/strided_slice_11/stack_2","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + {"name":"tower_0/ExpandDims_1/dim","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/mul_6/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/BatchGather/concat","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}}, + {"name":"tower_0/strided_slice_6","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}}}}, + {"name":"tower_0/BatchGather/Reshape_1/shape","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"1"}]}}}}}, + {"name":"tower_0/BatchGather/GatherV2/axis","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/BatchGather/concat_1","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + {"name":"tower_0/strided_slice_12/stack","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_12/stack_1","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_12/stack_2","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"3"}]}}}}}, + 
{"name":"tower_0/ExpandDims_3/dim","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/Reshape/shape","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/TopKV2/k","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/strided_slice_7","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}}}}, + {"name":"tower_0/ExpandDims_2/dim","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/concat_1/axis","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"CenternetHead/centernet_cls_output/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/centernet_cls_output/pointwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"80"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/centernet_cls_output/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"80"}]}}}}}, + {"name":"CenternetHead/c2_reverse/3x3/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"18"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/c3_upsample/branch_x_upsample_resize/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"192"},{"size":"1"}]}}}}}, + {"name":"CenternetHead/c3_reverse/3x3/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"24"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/c4_upsample/branch_x_upsample_resize/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"256"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/c4_reverse/3x3/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"22"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/c5_upsample/branch_x_upsample_resize/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"480"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/extern1/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + 
{"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"720"},{"size":"184"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"184"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"184"},{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"720"},{"size":"184"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"184"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"184"},{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"528"},{"size":"136"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"136"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"136"},{"size":"528"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"528"},{"size":"136"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"136"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"136"},{"size":"528"}]}}}}}, + 
{"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"384"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"384"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"384"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"24"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"24"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Mean/reduction_indices","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"72"},{"size":"24"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}}}}, + {"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"72"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"72"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/mul_1/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/images","op":"Placeholder","attr":{"shape":{"shape":{"dim":[{"size":"1"},{"size":"512"},{"size":"512"},{"size":"3"}]}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/add/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/extern1/hard_swish/add/y","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/c5_upsample/branch_y_upsample_resize/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"480"},{"size":"1"}]}}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/up_sampling2d/mul","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/concat/axis","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}}}}, + {"name":"CenternetHead/c4_upsample/branch_y_upsample_resize/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"256"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/up_sampling2d_1/mul","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/concat_1/axis","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"CenternetHead/c3_upsample/branch_y_upsample_resize/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"192"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/up_sampling2d_2/mul","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/concat_2/axis","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{}}},"dtype":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/Mean/reduction_indices","op":"Const","attr":{"dtype":{"type":"DT_INT32"},"value":{"tensor":{"dtype":"DT_INT32","tensorShape":{"dim":[{"size":"2"}]}}}}}, + {"name":"CenternetHead/conv1x1_se_b/weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"CenternetHead/centernet_wh_output/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"128"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/centernet_wh_output/pointwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"4"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"CenternetHead/centernet_wh_output/biases","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"4"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/mul/y","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{}}}}}, + {"name":"tower_0/MobilenetV3/Conv/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"3"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"256"},{"size":"96"}]}}}}}, + {"name":"tower_0/MobilenetV3/Conv/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"16"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"16"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"16"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"16"},{"size":"64"}]}}}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"64"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"24"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"72"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"72"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"72"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"72"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"72"},{"size":"24"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"72"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"72"}]}}}}}, + {"name":"tower_0/CenternetHead/c2_reverse/1x1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"72"},{"size":"18"}]}}}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"192"},{"size":"64"}]}}}}}, + {"name":"tower_0/CenternetHead/c2_reverse/1x1/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"18"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"72"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"72"}]}}}}}, + 
{"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"192"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"18"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"72"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"96"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"32"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/conv1x1_se_a/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"128"},{"size":"32"}]}}}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}}}}, + {"name":"tower_0/CenternetHead/c3_reverse/1x1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"24"}]}}}}}, + {"name":"tower_0/CenternetHead/conv1x1_se_a/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_reverse/1x1/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"24"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"96"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"24"},{"size":"96"}]}}}}}, + {"name":"tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"96"},{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"32"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"32"},{"size":"192"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"192"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"192"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"192"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"192"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"160"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"160"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"160"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"160"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"160"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"144"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"144"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"144"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"144"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"144"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"144"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"144"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"144"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"144"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"144"},{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"64"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"64"},{"size":"384"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"384"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"384"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"384"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"384"},{"size":"88"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"88"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"88"},{"size":"528"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"3"},{"size":"3"},{"size":"528"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"528"},{"size":"88"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"88"}]}}}}}, + 
{"name":"tower_0/CenternetHead/c4_reverse/1x1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"88"},{"size":"22"}]}}}}}, + {"name":"tower_0/CenternetHead/c4_reverse/1x1/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"22"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"88"},{"size":"528"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"22"},{"size":"128"}]}}}}}, + {"name":"tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"528"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"528"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"528"},{"size":"120"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"120"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"120"},{"size":"720"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"720"},{"size":"1"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"720"},{"size":"120"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"120"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"120"},{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"5"},{"size":"5"},{"size":"720"},{"size":"1"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"720"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"720"},{"size":"120"}]}}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"120"}]}}}}}, + {"name":"tower_0/extern1/Conv2D_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"120"},{"size":"480"}]}}}}}, + {"name":"tower_0/extern1/Conv2D_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"480"}]}}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"480"},{"size":"128"}]}}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"480"},{"size":"128"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"dtype":{"type":"DT_FLOAT"},"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"128"}]}}}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_weights","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"1"},{"size":"1"},{"size":"256"},{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","op":"Const","attr":{"value":{"tensor":{"dtype":"DT_FLOAT","tensorShape":{"dim":[{"size":"96"}]}}},"dtype":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/Conv/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/images","tower_0/MobilenetV3/Conv/Conv2D_weights","tower_0/MobilenetV3/Conv/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","2","2","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/Conv/hard_swish/mul_1/y","tower_0/MobilenetV3/Conv/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/Conv/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/Conv/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/Conv/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/Conv/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/Conv/hard_swish/mul","tower_0/MobilenetV3/Conv/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/Conv/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_bn_offset"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0}}}, + {"name":"tower_0/MobilenetV3/expanded_conv/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/Conv/hard_swish/mul_1"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/expand/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv/add","tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_1/expand/Relu","tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_bn_offset"],"attr":{"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/expand/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_2/expand/Relu","tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_bn_offset"],"attr":{"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_2/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_2/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_1/project/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_3/expand/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_2/add","tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + {"name":"tower_0/CenternetHead/c2_reverse/1x1/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_3/expand/Relu","tower_0/CenternetHead/c2_reverse/1x1/Conv2D_weights","tower_0/CenternetHead/c2_reverse/1x1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_3/expand/Relu","tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_bn_offset"],"attr":{"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + {"name":"tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/c2_reverse/1x1/Relu","CenternetHead/c2_reverse/3x3/depthwise_weights"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/CenternetHead/c2_reverse/3x3/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d/depthwise","tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_weights","tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"keep_dims":{"b":true},"Tidx":{"type":"DT_INT32"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Mean","MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_4/expand/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_4/expand/Relu","tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_bn_offset"],"attr":{"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Mean","MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_4/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_4/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_3/project/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/expand/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_4/add","tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/CenternetHead/c3_reverse/1x1/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_5/expand/Relu","tower_0/CenternetHead/c3_reverse/1x1/Conv2D_weights","tower_0/CenternetHead/c3_reverse/1x1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_5/expand/Relu","tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_bn_offset"],"attr":{"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + 
{"name":"tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/c3_reverse/1x1/Relu","CenternetHead/c3_reverse/3x3/depthwise_weights"],"attr":{"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/CenternetHead/c3_reverse/3x3/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d/depthwise","tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_weights","tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"padding":{"s":"VkFMSUQ="},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Mean/reduction_indices"],"attr":{"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Mean","MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_5/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_5/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_4/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_5/add","tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_6/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_bn_offset"],"attr":{"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"padding":{"s":"U0FNRQ=="}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_7/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_bn_offset"],"attr":{"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_7/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_7/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_6/project/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_7/add","tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_8/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_bn_offset"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_8/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_8/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_7/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_8/add","tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_9/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_bn_offset"],"attr":{"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_9/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_9/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_8/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_9/add","tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_10/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_bn_offset"],"attr":{"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Mean/reduction_indices"],"attr":{"keep_dims":{"b":true},"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Mean","MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"padding":{"s":"U0FNRQ=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"epsilon":{"f":0}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_11/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_11/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_bn_offset"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Mean/reduction_indices"],"attr":{"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Mean","MobilenetV3/expanded_conv_11/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_11/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_11/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_11/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_11/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_10/project/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_reverse/1x1/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_11/add","tower_0/CenternetHead/c4_reverse/1x1/Conv2D_weights","tower_0/CenternetHead/c4_reverse/1x1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_11/add","tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"num_args":{"i":"1"},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/c4_reverse/1x1/Relu","CenternetHead/c4_reverse/3x3/depthwise_weights"],"attr":{"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_12/expand/BatchNorm/FusedBatchNorm"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_12/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/CenternetHead/c4_reverse/3x3/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d/depthwise","tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_weights","tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_bn_offset"],"attr":{"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","2","2","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Mean","MobilenetV3/expanded_conv_12/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_12/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"epsilon":{"f":0}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"use_cudnn_on_gpu":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_12/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_12/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"epsilon":{"f":0},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_13/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_13/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_bn_offset"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/Relu6"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Mean","MobilenetV3/expanded_conv_13/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_13/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"use_cudnn_on_gpu":{"b":true},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_13/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_13/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_13/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_12/project/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_13/add","tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}},"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_14/expand/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_14/expand/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise","op":"FusedDepthwiseConv2dNative","input":["tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise_weights","tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise_bn_offset"],"attr":{"padding":{"s":"U0FNRQ=="},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1/y","tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise","tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul","tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Mean","op":"Mean","input":["tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1","tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"},"keep_dims":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv/Relu","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Mean","MobilenetV3/expanded_conv_14/squeeze_excite/Conv/weights","MobilenetV3/expanded_conv_14/squeeze_excite/Conv/biases"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"epsilon":{"f":0},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/BiasAdd","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv/Relu","MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/weights","MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/biases"],"device":"/device:CPU:0","attr":{"epsilon":{"f":0},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/BiasAdd","tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/Relu6","op":"Relu6","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/mul/y","tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/mul","op":"Mul","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/mul","tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1"],"attr":{"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/MobilenetV3/expanded_conv_14/project/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/mul","tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_weights","tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/MobilenetV3/expanded_conv_14/add","op":"Add","input":["tower_0/MobilenetV3/expanded_conv_14/project/BatchNorm/FusedBatchNorm","tower_0/MobilenetV3/expanded_conv_13/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/extern1/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/MobilenetV3/expanded_conv_14/add","tower_0/extern1/Conv2D_weights","tower_0/extern1/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"padding":{"s":"U0FNRQ=="},"use_cudnn_on_gpu":{"b":true},"T":{"type":"DT_FLOAT"},"epsilon":{"f":0},"num_args":{"i":"1"},"dilations":{"list":{"i":["1","1","1","1"]}},"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}}}}, + {"name":"tower_0/extern1/hard_swish/mul","op":"Mul","input":["tower_0/extern1/hard_swish/mul_1/y","tower_0/extern1/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/extern1/hard_swish/add","op":"Add","input":["tower_0/extern1/BatchNorm/FusedBatchNorm","tower_0/extern1/hard_swish/add/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/extern1/hard_swish/Relu6","op":"Relu6","input":["tower_0/extern1/hard_swish/add"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/extern1/hard_swish/mul_1","op":"Mul","input":["tower_0/extern1/hard_swish/mul","tower_0/extern1/hard_swish/Relu6"],"attr":{"T":{"type":"DT_FLOAT"},"_grappler_ArithmeticOptimizer_MinimizeBroadcasts":{"b":true}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/extern1/hard_swish/mul_1","CenternetHead/c5_upsample/branch_x_upsample_resize/depthwise_weights"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/extern1/hard_swish/mul_1","CenternetHead/c5_upsample/branch_y_upsample_resize/depthwise_weights"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"use_cudnn_on_gpu":{"b":true},"epsilon":{"f":0},"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"num_args":{"i":"1"},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"explicit_paddings":{"list":{}},"num_args":{"i":"1"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"VkFMSUQ="},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/CenternetHead/c5_upsample/add","op":"Add","input":["tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c5_upsample/up_sampling2d/ResizeBilinear","op":"ResizeBilinear","input":["tower_0/CenternetHead/c5_upsample/add","tower_0/CenternetHead/c5_upsample/up_sampling2d/mul"],"attr":{"align_corners":{"b":false},"T":{"type":"DT_FLOAT"},"half_pixel_centers":{"b":false}}}, + {"name":"tower_0/CenternetHead/concat","op":"ConcatV2","input":["tower_0/CenternetHead/c4_reverse/3x3/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c5_upsample/up_sampling2d/ResizeBilinear","tower_0/CenternetHead/concat/axis"],"attr":{"N":{"i":"2"},"Tidx":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/Relu","op":"Relu","input":["tower_0/CenternetHead/concat"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/Relu","CenternetHead/c4_upsample/branch_x_upsample_resize/depthwise_weights"],"attr":{"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/Relu","CenternetHead/c4_upsample/branch_y_upsample_resize/depthwise_weights"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + 
{"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"padding":{"s":"VkFMSUQ="},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"num_args":{"i":"1"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"data_format":{"s":"TkhXQw=="},"epsilon":{"f":0},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/add","op":"Add","input":["tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c4_upsample/up_sampling2d_1/ResizeBilinear","op":"ResizeBilinear","input":["tower_0/CenternetHead/c4_upsample/add","tower_0/CenternetHead/c4_upsample/up_sampling2d_1/mul"],"attr":{"align_corners":{"b":false},"half_pixel_centers":{"b":false},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/concat_1","op":"ConcatV2","input":["tower_0/CenternetHead/c3_reverse/3x3/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c4_upsample/up_sampling2d_1/ResizeBilinear","tower_0/CenternetHead/concat_1/axis"],"attr":{"Tidx":{"type":"DT_INT32"},"N":{"i":"2"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/Relu_1","op":"Relu","input":["tower_0/CenternetHead/concat_1"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/Relu_1","CenternetHead/c3_upsample/branch_x_upsample_resize/depthwise_weights"],"attr":{"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/Relu_1","CenternetHead/c3_upsample/branch_y_upsample_resize/depthwise_weights"],"attr":{"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"T":{"type":"DT_FLOAT"},"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + 
{"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"num_args":{"i":"1"},"padding":{"s":"VkFMSUQ="}}}, + {"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm","op":"_FusedConv2D","input":["tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d/depthwise","tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_weights","tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"epsilon":{"f":0},"dilations":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"use_cudnn_on_gpu":{"b":true},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/add","op":"Add","input":["tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/BatchNorm/FusedBatchNorm"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/c3_upsample/up_sampling2d_2/ResizeBilinear","op":"ResizeBilinear","input":["tower_0/CenternetHead/c3_upsample/add","tower_0/CenternetHead/c3_upsample/up_sampling2d_2/mul"],"attr":{"align_corners":{"b":false},"T":{"type":"DT_FLOAT"},"half_pixel_centers":{"b":false}}}, + {"name":"tower_0/CenternetHead/concat_2","op":"ConcatV2","input":["tower_0/CenternetHead/c2_reverse/3x3/BatchNorm/FusedBatchNorm","tower_0/CenternetHead/c3_upsample/up_sampling2d_2/ResizeBilinear","tower_0/CenternetHead/concat_2/axis"],"attr":{"N":{"i":"2"},"T":{"type":"DT_FLOAT"},"Tidx":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/Relu_2","op":"Relu","input":["tower_0/CenternetHead/concat_2"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/Mean","op":"Mean","input":["tower_0/CenternetHead/Relu_2","tower_0/CenternetHead/Mean/reduction_indices"],"attr":{"T":{"type":"DT_FLOAT"},"keep_dims":{"b":true},"Tidx":{"type":"DT_INT32"}}}, + {"name":"tower_0/CenternetHead/conv1x1_se_a/Relu","op":"_FusedConv2D","input":["tower_0/CenternetHead/Mean","tower_0/CenternetHead/conv1x1_se_a/Conv2D_weights","tower_0/CenternetHead/conv1x1_se_a/Conv2D_bn_offset"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA==","UmVsdQ=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"epsilon":{"f":0},"use_cudnn_on_gpu":{"b":true},"num_args":{"i":"1"},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/CenternetHead/conv1x1_se_b/Conv2D","op":"Conv2D","input":["tower_0/CenternetHead/conv1x1_se_a/Relu","CenternetHead/conv1x1_se_b/weights"],"device":"/device:CPU:0","attr":{"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"use_cudnn_on_gpu":{"b":true},"padding":{"s":"U0FNRQ=="},"dilations":{"list":{"i":["1","1","1","1"]}},"explicit_paddings":{"list":{}}}}, + {"name":"tower_0/CenternetHead/Sigmoid","op":"Sigmoid","input":["tower_0/CenternetHead/conv1x1_se_b/Conv2D"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/mul","op":"Mul","input":["tower_0/CenternetHead/Relu_2","tower_0/CenternetHead/Sigmoid"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/CenternetHead/centernet_cls_output/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/mul","CenternetHead/centernet_cls_output/depthwise_weights"],"attr":{"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="}}}, + {"name":"tower_0/CenternetHead/centernet_wh_output/separable_conv2d/depthwise","op":"DepthwiseConv2dNative","input":["tower_0/CenternetHead/mul","CenternetHead/centernet_wh_output/depthwise_weights"],"attr":{"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"dilations":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="}}}, + {"name":"tower_0/CenternetHead/centernet_cls_output/BiasAdd","op":"_FusedConv2D","input":["tower_0/CenternetHead/centernet_cls_output/separable_conv2d/depthwise","CenternetHead/centernet_cls_output/pointwise_weights","CenternetHead/centernet_cls_output/biases"],"device":"/device:CPU:0","attr":{"T":{"type":"DT_FLOAT"},"explicit_paddings":{"list":{}},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}},"use_cudnn_on_gpu":{"b":true},"data_format":{"s":"TkhXQw=="},"num_args":{"i":"1"},"padding":{"s":"VkFMSUQ="},"epsilon":{"f":0},"strides":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/CenternetHead/centernet_wh_output/BiasAdd","op":"_FusedConv2D","input":["tower_0/CenternetHead/centernet_wh_output/separable_conv2d/depthwise","CenternetHead/centernet_wh_output/pointwise_weights","CenternetHead/centernet_wh_output/biases"],"device":"/device:CPU:0","attr":{"strides":{"list":{"i":["1","1","1","1"]}},"padding":{"s":"VkFMSUQ="},"num_args":{"i":"1"},"use_cudnn_on_gpu":{"b":true},"epsilon":{"f":0},"explicit_paddings":{"list":{}},"data_format":{"s":"TkhXQw=="},"T":{"type":"DT_FLOAT"},"fused_ops":{"list":{"s":["Qmlhc0FkZA=="]}},"dilations":{"list":{"i":["1","1","1","1"]}}}}, + {"name":"tower_0/Sigmoid","op":"Sigmoid","input":["tower_0/CenternetHead/centernet_cls_output/BiasAdd"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/mul","op":"Mul","input":["tower_0/CenternetHead/centernet_wh_output/BiasAdd","tower_0/mul/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/keypoints","op":"Identity","input":["tower_0/Sigmoid"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/wh","op":"Identity","input":["tower_0/mul"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/max_pooling2d/MaxPool","op":"MaxPool","input":["tower_0/keypoints"],"attr":{"explicit_paddings":{"list":{}},"strides":{"list":{"i":["1","1","1","1"]}},"T":{"type":"DT_FLOAT"},"padding":{"s":"U0FNRQ=="},"data_format":{"s":"TkhXQw=="},"ksize":{"list":{"i":["1","3","3","1"]}}}}, + {"name":"tower_0/BatchGather/Reshape_2","op":"Reshape","input":["tower_0/wh","tower_0/BatchGather/concat"],"attr":{"Tshape":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/Equal","op":"Equal","input":["tower_0/keypoints","tower_0/max_pooling2d/MaxPool"],"attr":{"incompatible_shape_error":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/Cast","op":"Cast","input":["tower_0/Equal"],"attr":{"SrcT":{"type":"DT_BOOL"},"DstT":{"type":"DT_FLOAT"},"Truncate":{"b":false}}}, + {"name":"tower_0/mul_1","op":"Mul","input":["tower_0/keypoints","tower_0/Cast"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/Reshape","op":"Reshape","input":["tower_0/mul_1","tower_0/Reshape/shape"],"attr":{"T":{"type":"DT_FLOAT"},"Tshape":{"type":"DT_INT32"}}}, + {"name":"tower_0/TopKV2","op":"TopKV2","input":["tower_0/Reshape","tower_0/TopKV2/k"],"attr":{"sorted":{"b":true},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/ExpandDims_3","op":"ExpandDims","input":["tower_0/TopKV2","tower_0/ExpandDims_3/dim"],"attr":{"Tdim":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/floordiv_1","op":"FloorDiv","input":["tower_0/TopKV2:1","tower_0/strided_slice_7"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/floordiv","op":"FloorDiv","input":["tower_0/TopKV2:1","tower_0/strided_slice_7"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/mod","op":"FloorMod","input":["tower_0/TopKV2:1","tower_0/strided_slice_7"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/floordiv_2","op":"FloorDiv","input":["tower_0/floordiv_1","tower_0/strided_slice_6"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/mod_1","op":"FloorMod","input":["tower_0/floordiv","tower_0/strided_slice_6"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/ExpandDims_2","op":"ExpandDims","input":["tower_0/mod","tower_0/ExpandDims_2/dim"],"attr":{"Tdim":{"type":"DT_INT32"},"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/ExpandDims_1","op":"ExpandDims","input":["tower_0/floordiv_2","tower_0/ExpandDims_1/dim"],"attr":{"Tdim":{"type":"DT_INT32"},"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/mul_2","op":"Mul","input":["tower_0/floordiv_2","tower_0/strided_slice_6"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/ExpandDims","op":"ExpandDims","input":["tower_0/mod_1","tower_0/ExpandDims/dim"],"attr":{"T":{"type":"DT_INT32"},"Tdim":{"type":"DT_INT32"}}}, + {"name":"tower_0/Cast_3","op":"Cast","input":["tower_0/ExpandDims_2"],"attr":{"Truncate":{"b":false},"DstT":{"type":"DT_FLOAT"},"SrcT":{"type":"DT_INT32"}}}, + {"name":"tower_0/Cast_2","op":"Cast","input":["tower_0/ExpandDims_1"],"attr":{"SrcT":{"type":"DT_INT32"},"Truncate":{"b":false},"DstT":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/BatchGather/add","op":"Add","input":["tower_0/mul_2","tower_0/mod_1"],"attr":{"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/Cast_1","op":"Cast","input":["tower_0/ExpandDims"],"attr":{"DstT":{"type":"DT_FLOAT"},"SrcT":{"type":"DT_INT32"},"Truncate":{"b":false}}}, + {"name":"tower_0/mul_4","op":"Mul","input":["tower_0/Cast_2","tower_0/mul_4/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/mul_6","op":"Mul","input":["tower_0/Cast_2","tower_0/mul_6/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + 
{"name":"tower_0/BatchGather/Reshape_1","op":"Reshape","input":["tower_0/BatchGather/add","tower_0/BatchGather/Reshape_1/shape"],"attr":{"Tshape":{"type":"DT_INT32"},"T":{"type":"DT_INT32"}}}, + {"name":"tower_0/mul_3","op":"Mul","input":["tower_0/Cast_1","tower_0/mul_3/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/mul_5","op":"Mul","input":["tower_0/Cast_1","tower_0/mul_5/y"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/BatchGather/GatherV2","op":"GatherV2","input":["tower_0/BatchGather/Reshape_2","tower_0/BatchGather/Reshape_1","tower_0/BatchGather/GatherV2/axis"],"attr":{"Taxis":{"type":"DT_INT32"},"batch_dims":{"i":"0"},"Tindices":{"type":"DT_INT32"},"Tparams":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/BatchGather/Reshape_3","op":"Reshape","input":["tower_0/BatchGather/GatherV2","tower_0/BatchGather/concat_1"],"attr":{"Tshape":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/strided_slice_11","op":"StridedSlice","input":["tower_0/BatchGather/Reshape_3","tower_0/strided_slice_11/stack","tower_0/strided_slice_11/stack_1","tower_0/strided_slice_11/stack_2"],"attr":{"begin_mask":{"i":"3"},"shrink_axis_mask":{"i":"0"},"new_axis_mask":{"i":"0"},"end_mask":{"i":"3"},"T":{"type":"DT_FLOAT"},"Index":{"type":"DT_INT32"},"ellipsis_mask":{"i":"0"}}}, + {"name":"tower_0/strided_slice_12","op":"StridedSlice","input":["tower_0/BatchGather/Reshape_3","tower_0/strided_slice_12/stack","tower_0/strided_slice_12/stack_1","tower_0/strided_slice_12/stack_2"],"attr":{"end_mask":{"i":"3"},"Index":{"type":"DT_INT32"},"T":{"type":"DT_FLOAT"},"begin_mask":{"i":"3"},"ellipsis_mask":{"i":"0"},"shrink_axis_mask":{"i":"0"},"new_axis_mask":{"i":"0"}}}, + {"name":"tower_0/strided_slice_9","op":"StridedSlice","input":["tower_0/BatchGather/Reshape_3","tower_0/strided_slice_9/stack","tower_0/strided_slice_9/stack_1","tower_0/strided_slice_9/stack_2"],"attr":{"ellipsis_mask":{"i":"0"},"T":{"type":"DT_FLOAT"},"Index":{"type":"DT_INT32"},"end_mask":{"i":"3"},"shrink_axis_mask":{"i":"0"},"new_axis_mask":{"i":"0"},"begin_mask":{"i":"3"}}}, + {"name":"tower_0/strided_slice_10","op":"StridedSlice","input":["tower_0/BatchGather/Reshape_3","tower_0/strided_slice_10/stack","tower_0/strided_slice_10/stack_1","tower_0/strided_slice_10/stack_2"],"attr":{"end_mask":{"i":"3"},"new_axis_mask":{"i":"0"},"shrink_axis_mask":{"i":"0"},"begin_mask":{"i":"3"},"ellipsis_mask":{"i":"0"},"T":{"type":"DT_FLOAT"},"Index":{"type":"DT_INT32"}}}, + {"name":"tower_0/add_1","op":"Add","input":["tower_0/mul_5","tower_0/strided_slice_11"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/add_2","op":"Add","input":["tower_0/mul_6","tower_0/strided_slice_12"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/sub","op":"Sub","input":["tower_0/mul_3","tower_0/strided_slice_9"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/sub_1","op":"Sub","input":["tower_0/mul_4","tower_0/strided_slice_10"],"attr":{"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/concat_1","op":"ConcatV2","input":["tower_0/sub","tower_0/sub_1","tower_0/add_1","tower_0/add_2","tower_0/ExpandDims_3","tower_0/Cast_3","tower_0/concat_1/axis"],"attr":{"Tidx":{"type":"DT_INT32"},"N":{"i":"6"},"T":{"type":"DT_FLOAT"}}}, + {"name":"tower_0/detections","op":"Identity","input":["tower_0/concat_1"],"attr":{"T":{"type":"DT_FLOAT"}}} + ], + "library": {}, + "versions": {} + }, + "weightsManifest": + [ + { + "paths": ["centernet.bin"], + "weights": 
[{"name":"tower_0/mul_3/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/strided_slice_9/stack","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_9/stack_1","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_9/stack_2","shape":[3],"dtype":"int32"},{"name":"tower_0/mul_4/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/strided_slice_10/stack","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_10/stack_1","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_10/stack_2","shape":[3],"dtype":"int32"},{"name":"tower_0/ExpandDims/dim","shape":[],"dtype":"int32"},{"name":"tower_0/mul_5/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/strided_slice_11/stack","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_11/stack_1","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_11/stack_2","shape":[3],"dtype":"int32"},{"name":"tower_0/ExpandDims_1/dim","shape":[],"dtype":"int32"},{"name":"tower_0/mul_6/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/BatchGather/concat","shape":[2],"dtype":"int32"},{"name":"tower_0/strided_slice_6","shape":[],"dtype":"int32"},{"name":"tower_0/BatchGather/Reshape_1/shape","shape":[1],"dtype":"int32"},{"name":"tower_0/BatchGather/GatherV2/axis","shape":[],"dtype":"int32"},{"name":"tower_0/BatchGather/concat_1","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_12/stack","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_12/stack_1","shape":[3],"dtype":"int32"},{"name":"tower_0/strided_slice_12/stack_2","shape":[3],"dtype":"int32"},{"name":"tower_0/ExpandDims_3/dim","shape":[],"dtype":"int32"},{"name":"tower_0/Reshape/shape","shape":[2],"dtype":"int32"},{"name":"tower_0/TopKV2/k","shape":[],"dtype":"int32"},{"name":"tower_0/strided_slice_7","shape":[],"dtype":"int32"},{"name":"tower_0/ExpandDims_2/dim","shape":[],"dtype":"int32"},{"name":"tower_0/concat_1/axis","shape":[],"dtype":"int32"},{"name":"CenternetHead/centernet_cls_output/depthwise_weights","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/centernet_cls_output/pointwise_weights","shape":[1,1,128,80],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/centernet_cls_output/biases","shape":[80],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c2_reverse/3x3/depthwise_weights","shape":[5,5,18,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c3_upsample/branch_x_upsample_resize/depthwise_weights","shape":[3,3,192,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c3_reverse/3x3/depthwise_weights","shape":[5,5,24,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c4_upsample/branch_x_upsample_resize/depthwise_weights","shape":[3,3,256,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c4_reverse/3x3/depthwise_weights","shape":[5,5,22,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c5_upsample/branch_x_upsample_resize/depthwise_weights","shape":[3,3,
480,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/extern1/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv/weights","shape":[1,1,720,184],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv/biases","shape":[184],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/weights","shape":[1,1,184,720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/biases","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv/weights","shape":[1,1,720,184],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv/biases","shape":[184],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/weights","shape":[1,1,184,720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/biases","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/hard_swish/add/y","shape":[],"dtype":"float32","quanti
zation":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv/weights","shape":[1,1,528,136],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv/biases","shape":[136],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/weights","shape":[1,1,136,528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/biases","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv/weights","shape":[1,1,528,136],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv/biases","shape":[136],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/weights","shape":[1,1,136,528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/biases","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quant
ization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv/weights","shape":[1,1,384,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv/biases","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/weights","shape":[1,1,96,384],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/biases","shape":[384],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/mul_1/y","shape":
[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv/weights","shape":[1,1,96,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv/biases","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/weights","shape":[1,1,24,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/biases","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv/weights","shape":[1,1,96,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv/biases","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/weights","shape":[1,1,24,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/biases","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv/weights","shape":[1,1,72,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv/biases","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/weights","shape":[1,1,24,72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/biases","shape":[72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/squeeze_excite/Conv_1/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/Conv/hard_swish/mul_1/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/Mobil
enetV3/Conv/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/expand/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/extern1/hard_swish/add/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/c5_upsample/branch_y_upsample_resize/depthwise_weights","shape":[5,5,480,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c5_upsample/up_sampling2d/mul","shape":[2],"dtype":"int32"},{"name":"tower_0/CenternetHead/concat/axis","shape":[],"dtype":"int32"},{"name":"CenternetHead/c4_upsample/branch_y_upsample_resize/depthwise_weights","shape":[5,5,256,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_upsample/up_sampling2d_1/mul","shape":[2],"dtype":"int32"},{"name":"tower_0/CenternetHead/concat_1/axis","shape":[],"dtype":"int32"},{"name":"CenternetHead/c3_upsample/branch_y_upsample_resize/depthwise_weights","shape":[5,5,192,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_upsample/up_sampling2d_2/mul","shape":[2],"dtype":"int32"},{"name":"tower_0/CenternetHead/concat_2/axis","shape":[],"dtype":"int32"},{"name":"tower_0/CenternetHead/Mean/reduction_indices","shape":[2],"dtype":"int32"},{"name":"CenternetHead/conv1x1_se_b/weights","shape":[1,1,32,128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/centernet_wh_output/depthwise_weights","shape":[3,3,128,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/centernet_wh_output/pointwise_weights","shape":[1,1,128,4],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"CenternetHead/centernet_wh_output/biases","shape":[4],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/mul/y","shape":[],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/Conv/Conv2D_weights","shape":[3,3,3,16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_weights","shape":[1,1,256,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/Conv/Conv2D_bn_offset","shape":[16],"dtype":"float32","quantization":{"dtype":"float
16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_weights","shape":[3,3,16,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv/depthwise/depthwise_bn_offset","shape":[16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv/project/Conv2D_weights","shape":[1,1,16,16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv/project/Conv2D_bn_offset","shape":[16],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_weights","shape":[1,1,16,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/expand/Conv2D_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_weights","shape":[3,3,64,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/depthwise/depthwise_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_weights","shape":[1,1,64,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_1/project/Conv2D_bn_offset","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_weights","shape":[1,1,24,72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/expand/Conv2D_bn_offset","shape":[72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_weights","shape":[3,3,72,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/depthwise/depthwise_bn_offset","shape":[72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_weights","shape":[1,1,72,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_2/project/Conv2D_bn_offset","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_weights","shape":[1,1,24,72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/expand/Conv2D_bn_offset","shape":[72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c2_reverse/1x1/Conv2D_weights","shape":[1,1,72,18],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_weights","shape":[1,1,192,64],"dtype":"float32","quantiz
ation":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c2_reverse/1x1/Conv2D_bn_offset","shape":[18],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_weights","shape":[5,5,72,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/depthwise/depthwise_bn_offset","shape":[72],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_weights","shape":[1,1,192,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_weights","shape":[1,1,18,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c2_reverse/3x3/separable_conv2d_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_weights","shape":[1,1,72,32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_3/project/Conv2D_bn_offset","shape":[32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_weights","shape":[1,1,32,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/expand/Conv2D_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_weights","shape":[5,5,96,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/depthwise/depthwise_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_weights","shape":[1,1,96,32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_4/project/Conv2D_bn_offset","shape":[32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_weights","shape":[1,1,32,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/conv1x1_se_a/Conv2D_weights","shape":[1,1,128,32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/expand/Conv2D_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_reverse/1x1/Conv2D_weights","shape":[1,1,96,24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/conv1x1_se_a/Conv2D_bn_offset","shape":[32],"
dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_reverse/1x1/Conv2D_bn_offset","shape":[24],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_weights","shape":[5,5,96,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/depthwise/depthwise_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_weights","shape":[1,1,24,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c3_reverse/3x3/separable_conv2d_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_weights","shape":[1,1,96,32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_5/project/Conv2D_bn_offset","shape":[32],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_weights","shape":[1,1,32,192],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/expand/Conv2D_bn_offset","shape":[192],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_weights","shape":[3,3,192,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/depthwise/depthwise_bn_offset","shape":[192],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_weights","shape":[1,1,192,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_6/project/Conv2D_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_weights","shape":[1,1,64,160],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/expand/Conv2D_bn_offset","shape":[160],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_weights","shape":[3,3,160,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/depthwise/depthwise_bn_offset","shape":[160],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_weights","shape":[1,1,160,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_7/project/Conv2D_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_weights","shape":[1,1,64,144],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/expand/Conv2D_bn_offset","shape":[144],"dtype":"
float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_weights","shape":[3,3,144,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/depthwise/depthwise_bn_offset","shape":[144],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_weights","shape":[1,1,144,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_8/project/Conv2D_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_weights","shape":[1,1,64,144],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/expand/Conv2D_bn_offset","shape":[144],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_weights","shape":[3,3,144,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/depthwise/depthwise_bn_offset","shape":[144],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_weights","shape":[1,1,144,64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_9/project/Conv2D_bn_offset","shape":[64],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_weights","shape":[1,1,64,384],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/expand/Conv2D_bn_offset","shape":[384],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_weights","shape":[3,3,384,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/depthwise/depthwise_bn_offset","shape":[384],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_weights","shape":[1,1,384,88],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_10/project/Conv2D_bn_offset","shape":[88],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_weights","shape":[1,1,88,528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/expand/Conv2D_bn_offset","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_weights","shape":[3,3,528,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/depthwise/depthwise_bn_offset","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_weights","shape
":[1,1,528,88],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_11/project/Conv2D_bn_offset","shape":[88],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_reverse/1x1/Conv2D_weights","shape":[1,1,88,22],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_reverse/1x1/Conv2D_bn_offset","shape":[22],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_weights","shape":[1,1,88,528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/expand/Conv2D_bn_offset","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_weights","shape":[1,1,22,128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_reverse/3x3/separable_conv2d_bn_offset","shape":[128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_weights","shape":[5,5,528,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/depthwise/depthwise_bn_offset","shape":[528],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_weights","shape":[1,1,528,120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_12/project/Conv2D_bn_offset","shape":[120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_weights","shape":[1,1,120,720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/expand/Conv2D_bn_offset","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_weights","shape":[5,5,720,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/depthwise/depthwise_bn_offset","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_weights","shape":[1,1,720,120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_13/project/Conv2D_bn_offset","shape":[120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_weights","shape":[1,1,120,720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/expand/Conv2D_bn_offset","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwise_weights","shape":[5,5,720,1],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/depthwise/depthwis
e_bn_offset","shape":[720],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_weights","shape":[1,1,720,120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/MobilenetV3/expanded_conv_14/project/Conv2D_bn_offset","shape":[120],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/extern1/Conv2D_weights","shape":[1,1,120,480],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/extern1/Conv2D_bn_offset","shape":[480],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_weights","shape":[1,1,480,128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c5_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","shape":[128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_weights","shape":[1,1,480,128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c5_upsample/branch_y_upsample_resize/separable_conv2d_bn_offset","shape":[128],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_weights","shape":[1,1,256,96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}},{"name":"tower_0/CenternetHead/c4_upsample/branch_x_upsample_resize/separable_conv2d_bn_offset","shape":[96],"dtype":"float32","quantization":{"dtype":"float16","original_dtype":"float32"}}] + } + ] +} \ No newline at end of file diff --git a/models/models.json b/models/models.json index 051fd98e..c65a53e7 100644 --- a/models/models.json +++ b/models/models.json @@ -1,6 +1,7 @@ { "antispoof": 853098, "blazeface": 538928, + "centernet": 4030290, "emotion": 820516, "facemesh": 1477958, "faceres": 6978814, @@ -8,7 +9,6 @@ "handtrack": 2964837, "iris": 2599092, "liveness": 592976, - "mb3-centernet": 4030290, "models": 0, "movenet-lightning": 4650216, "age": 161240, diff --git a/src/config.ts b/src/config.ts index 3dd4909d..661caf9a 100644 --- a/src/config.ts +++ b/src/config.ts @@ -449,7 +449,7 @@ const config: Config = { }, object: { enabled: false, - modelPath: 'mb3-centernet.json', + modelPath: 'centernet.json', minConfidence: 0.2, iouThreshold: 0.4, maxDetected: 10, diff --git a/src/face/faceres.ts b/src/face/faceres.ts index 9c8e8913..3fa28116 100644 --- a/src/face/faceres.ts +++ b/src/face/faceres.ts @@ -2,7 +2,7 @@ * FaceRes model implementation * * Returns Age, Gender, Descriptor - * Implements Face simmilarity function + * Implements Face similarity function * * Based on: [**HSE-FaceRes**](https://github.com/HSE-asavchenko/HSE_FaceRec_tf) */ diff --git a/src/human.ts b/src/human.ts index b8165ea4..2edfba27 100644 --- a/src/human.ts +++ b/src/human.ts @@ -22,11 +22,9 @@ import * as centernet from './object/centernet'; import * as efficientpose from './body/efficientpose'; import * as face from './face/face'; import * as facemesh from './face/facemesh'; -import * as faceres from './face/faceres'; import * as gesture from './gesture/gesture'; import * as handpose from './hand/handpose'; import * as handtrack from 
'./hand/handtrack'; -import * as humangl from './tfjs/humangl'; import * as image from './image/image'; import * as interpolate from './util/interpolate'; import * as meet from './segmentation/meet'; @@ -41,7 +39,7 @@ import * as selfie from './segmentation/selfie'; import * as warmups from './warmup'; // type definitions -import { Input, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, PersonResult, AnyCanvas, emptyResult } from './exports'; +import { Input, DrawOptions, Config, Result, FaceResult, HandResult, BodyResult, ObjectResult, GestureResult, AnyCanvas, emptyResult } from './exports'; import type { Tensor, Tensor4D } from './tfjs/types'; // type exports export * from './exports'; @@ -94,7 +92,15 @@ export class Human { * - options: are global settings for all draw operations, can be overriden for each draw method {@link DrawOptions} * - face, body, hand, gesture, object, person: draws detected results as overlays on canvas */ - draw: { canvas: typeof draw.canvas, face: typeof draw.face, body: typeof draw.body, hand: typeof draw.hand, gesture: typeof draw.gesture, object: typeof draw.object, person: typeof draw.person, all: typeof draw.all, options: DrawOptions }; + // draw: { canvas: typeof draw.canvas, face: typeof draw.face, body: typeof draw.body, hand: typeof draw.hand, gesture: typeof draw.gesture, object: typeof draw.object, person: typeof draw.person, all: typeof draw.all, options: DrawOptions }; + draw: typeof draw = draw; + + /** Face Matching + * - similarity: compare two face descriptors and return similarity index + * - distance: compare two face descriptors and return raw calculated differences + * - find: compare face descriptor to array of face descriptors and return best match + */ + match: typeof match = match; /** Currently loaded models * @internal @@ -121,8 +127,6 @@ export class Human { #numTensors: number; #analyzeMemoryLeaks: boolean; #checkSanity: boolean; - /** WebGL debug info */ - gl: Record; // definition end /** Constructor for **Human** library that is futher used for all operations @@ -153,28 +157,15 @@ export class Human { this.performance = {}; this.events = (typeof EventTarget !== 'undefined') ? 
new EventTarget() : undefined; // object that contains all initialized models - this.models = new models.Models(); + this.models = new models.Models(this); // reexport draw methods draw.init(); - this.draw = { - options: draw.options, - canvas: (input: AnyCanvas | HTMLImageElement | HTMLVideoElement, output: AnyCanvas) => draw.canvas(input, output), - face: (output: AnyCanvas, result: FaceResult[], options?: Partial) => draw.face(output, result, options), - body: (output: AnyCanvas, result: BodyResult[], options?: Partial) => draw.body(output, result, options), - hand: (output: AnyCanvas, result: HandResult[], options?: Partial) => draw.hand(output, result, options), - gesture: (output: AnyCanvas, result: GestureResult[], options?: Partial) => draw.gesture(output, result, options), - object: (output: AnyCanvas, result: ObjectResult[], options?: Partial) => draw.object(output, result, options), - person: (output: AnyCanvas, result: PersonResult[], options?: Partial) => draw.person(output, result, options), - all: (output: AnyCanvas, result: Result, options?: Partial) => draw.all(output, result, options), - }; this.result = emptyResult(); // export access to image processing this.process = { tensor: null, canvas: null }; // export raw access to underlying models this.faceTriangulation = facemesh.triangulation; this.faceUVMap = facemesh.uvmap; - // set gl info - this.gl = humangl.config; // init model validation models.validateModel(this, null, ''); // include platform info @@ -227,18 +218,6 @@ export class Human { return msgs; } - /** Check model for invalid kernel ops for current backend */ - check() { - return models.validate(this); - } - - /** Exports face matching methods {@link match#similarity} */ - public similarity = match.similarity; - /** Exports face matching methods {@link match#distance} */ - public distance = match.distance; - /** Exports face matching methods {@link match#match} */ - public match = match.match; - /** Utility wrapper for performance.now() */ now(): number { // eslint-disable-line class-methods-use-this return now(); @@ -273,16 +252,7 @@ export class Human { return tensor; } - /** Enhance method performs additional enhacements to face image previously detected for futher processing - * - * @param input - Tensor as provided in human.result.face[n].tensor - * @returns Tensor - */ - enhance(input: Tensor): Tensor | null { // eslint-disable-line class-methods-use-this - return faceres.enhance(input); - } - - /** Compare two input tensors for pixel simmilarity + /** Compare two input tensors for pixel similarity * - use `human.image` to process any valid input and get a tensor that can be used for compare * - when passing manually generated tensors: * - both input tensors must be in format [1, height, width, 3] @@ -325,18 +295,17 @@ export class Human { await tf.ready(); if (this.env.browser) { if (this.config.debug) log('configuration:', this.config); - // @ts-ignore private property if (this.config.debug) log('tf flags:', this.tf.ENV.flags); } } - await models.load(this); // actually loads models + await this.models.load(); // actually loads models if (this.env.initial && this.config.debug) log('tf engine state:', this.tf.engine().state.numBytes, 'bytes', this.tf.engine().state.numTensors, 'tensors'); // print memory stats on first run this.env.initial = false; const loaded = Object.values(this.models).filter((model) => model).length; if (loaded !== count) { // number of loaded models changed - models.validate(this); // validate kernel ops used by model against 
current backend + this.models.validate(); // validate kernel ops used by model against current backend this.emit('load'); } @@ -359,9 +328,6 @@ export class Human { return interpolate.calc(result, this.config); } - /** get model loading/loaded stats */ - getModelStats(): models.ModelStats { return models.getModelStats(this); } - /** Warmup method pre-initializes all configured models for faster inference * - can take significant time on startup * - only used for `webgl` and `humangl` backends diff --git a/src/models.ts b/src/models.ts index b9ae9d0a..75005aa9 100644 --- a/src/models.ts +++ b/src/models.ts @@ -31,136 +31,10 @@ import { modelStats, ModelInfo } from './tfjs/load'; import type { GraphModel } from './tfjs/types'; import type { Human } from './human'; -/** Instances of all possible TFJS Graph Models used by Human - * - loaded as needed based on configuration - * - initialized explictly with `human.load()` method - * - initialized implicity on first call to `human.detect()` - * - each model can be `null` if not loaded, instance of `GraphModel` if loaded or `Promise` if loading - */ -export class Models { - ssrnetage: null | GraphModel | Promise = null; - gear: null | GraphModel | Promise = null; - blazeposedetect: null | GraphModel | Promise = null; - blazepose: null | GraphModel | Promise = null; - centernet: null | GraphModel | Promise = null; - efficientpose: null | GraphModel | Promise = null; - mobilefacenet: null | GraphModel | Promise = null; - insightface: null | GraphModel | Promise = null; - emotion: null | GraphModel | Promise = null; - facedetect: null | GraphModel | Promise = null; - faceiris: null | GraphModel | Promise = null; - facemesh: null | GraphModel | Promise = null; - faceres: null | GraphModel | Promise = null; - ssrnetgender: null | GraphModel | Promise = null; - handpose: null | GraphModel | Promise = null; - handskeleton: null | GraphModel | Promise = null; - handtrack: null | GraphModel | Promise = null; - liveness: null | GraphModel | Promise = null; - meet: null | GraphModel | Promise = null; - movenet: null | GraphModel | Promise = null; - nanodet: null | GraphModel | Promise = null; - posenet: null | GraphModel | Promise = null; - selfie: null | GraphModel | Promise = null; - rvm: null | GraphModel | Promise = null; - antispoof: null | GraphModel | Promise = null; -} - -/** structure that holds global stats for currently loaded models */ -export interface ModelStats { - numLoadedModels: number, - numDefinedModels: number, - percentageLoaded: number, - totalSizeFromManifest: number, - totalSizeWeights: number, - totalSizeLoading: number, - totalSizeEnabled: undefined, - modelStats: ModelInfo[], -} - -let instance: Human; - -export const getModelStats = (currentInstance: Human): ModelStats => { - if (currentInstance) instance = currentInstance; - if (!instance) log('instance not registred'); - let totalSizeFromManifest = 0; - let totalSizeWeights = 0; - let totalSizeLoading = 0; - for (const m of Object.values(modelStats)) { - totalSizeFromManifest += m.sizeFromManifest; - totalSizeWeights += m.sizeLoadedWeights; - totalSizeLoading += m.sizeDesired; - } - const percentageLoaded = totalSizeLoading > 0 ? 
totalSizeWeights / totalSizeLoading : 0; - return { - numLoadedModels: Object.values(modelStats).length, - numDefinedModels: Object.keys(instance.models).length, - percentageLoaded, - totalSizeFromManifest, - totalSizeWeights, - totalSizeLoading, - totalSizeEnabled: undefined, - modelStats: Object.values(modelStats), - }; -}; - -export function reset(currentInstance: Human): void { - if (currentInstance) instance = currentInstance; - // if (instance.config.debug) log('resetting loaded models'); - for (const model of Object.keys(instance.models)) instance.models[model as keyof Models] = null; -} - -/** Load method preloads all instance.configured models on-demand */ -export async function load(currentInstance: Human): Promise { - if (currentInstance) instance = currentInstance; - if (!instance) log('instance not registred'); - if (env.initial) reset(instance); - if (instance.config.hand.enabled) { // handpose model is a combo that must be loaded as a whole - if (!instance.models.handpose && instance.config.hand.detector?.modelPath?.includes('handdetect')) { - [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config); - } - if (!instance.models.handskeleton && instance.config.hand.landmarks && instance.config.hand.detector?.modelPath?.includes('handdetect')) { - [instance.models.handpose, instance.models.handskeleton] = await handpose.load(instance.config); - } - } - if (instance.config.body.enabled && !instance.models.blazepose && instance.config.body.modelPath?.includes('blazepose')) instance.models.blazepose = blazepose.loadPose(instance.config); - if (instance.config.body.enabled && !instance.models.blazeposedetect && instance.config.body['detector'] && instance.config.body['detector'].modelPath) instance.models.blazeposedetect = blazepose.loadDetect(instance.config); - if (instance.config.body.enabled && !instance.models.efficientpose && instance.config.body.modelPath?.includes('efficientpose')) instance.models.efficientpose = efficientpose.load(instance.config); - if (instance.config.body.enabled && !instance.models.movenet && instance.config.body.modelPath?.includes('movenet')) instance.models.movenet = movenet.load(instance.config); - if (instance.config.body.enabled && !instance.models.posenet && instance.config.body.modelPath?.includes('posenet')) instance.models.posenet = posenet.load(instance.config); - if (instance.config.face.enabled && !instance.models.facedetect) instance.models.facedetect = blazeface.load(instance.config); - if (instance.config.face.enabled && instance.config.face.antispoof?.enabled && !instance.models.antispoof) instance.models.antispoof = antispoof.load(instance.config); - if (instance.config.face.enabled && instance.config.face.liveness?.enabled && !instance.models.liveness) instance.models.liveness = liveness.load(instance.config); - if (instance.config.face.enabled && instance.config.face.description?.enabled && !instance.models.faceres) instance.models.faceres = faceres.load(instance.config); - if (instance.config.face.enabled && instance.config.face.emotion?.enabled && !instance.models.emotion) instance.models.emotion = emotion.load(instance.config); - if (instance.config.face.enabled && instance.config.face.iris?.enabled && !instance.config.face.attention?.enabled && !instance.models.faceiris) instance.models.faceiris = iris.load(instance.config); - if (instance.config.face.enabled && instance.config.face.mesh?.enabled && (!instance.models.facemesh)) instance.models.facemesh = facemesh.load(instance.config); - if 
(instance.config.face.enabled && instance.config.face['gear']?.enabled && !instance.models.gear) instance.models.gear = gear.load(instance.config); - if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetage) instance.models.ssrnetage = ssrnetAge.load(instance.config); - if (instance.config.face.enabled && instance.config.face['ssrnet']?.enabled && !instance.models.ssrnetgender) instance.models.ssrnetgender = ssrnetGender.load(instance.config); - if (instance.config.face.enabled && instance.config.face['mobilefacenet']?.enabled && !instance.models.mobilefacenet) instance.models.mobilefacenet = mobilefacenet.load(instance.config); - if (instance.config.face.enabled && instance.config.face['insightface']?.enabled && !instance.models.insightface) instance.models.insightface = insightface.load(instance.config); - if (instance.config.hand.enabled && !instance.models.handtrack && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handtrack = handtrack.loadDetect(instance.config); - if (instance.config.hand.enabled && instance.config.hand.landmarks && !instance.models.handskeleton && instance.config.hand.detector?.modelPath?.includes('handtrack')) instance.models.handskeleton = handtrack.loadSkeleton(instance.config); - if (instance.config.object.enabled && !instance.models.centernet && instance.config.object.modelPath?.includes('centernet')) instance.models.centernet = centernet.load(instance.config); - if (instance.config.object.enabled && !instance.models.nanodet && instance.config.object.modelPath?.includes('nanodet')) instance.models.nanodet = nanodet.load(instance.config); - if (instance.config.segmentation.enabled && !instance.models.selfie && instance.config.segmentation.modelPath?.includes('selfie')) instance.models.selfie = selfie.load(instance.config); - if (instance.config.segmentation.enabled && !instance.models.meet && instance.config.segmentation.modelPath?.includes('meet')) instance.models.meet = meet.load(instance.config); - if (instance.config.segmentation.enabled && !instance.models.rvm && instance.config.segmentation.modelPath?.includes('rvm')) instance.models.rvm = rvm.load(instance.config); - - // models are loaded in parallel asynchronously so lets wait until they are actually loaded - for await (const model of Object.keys(instance.models)) { - if (instance.models[model as keyof Models] && typeof instance.models[model as keyof Models] !== 'undefined') { - instance.models[model as keyof Models] = await instance.models[model as keyof Models]; - } - } -} - export interface KernelOps { name: string, url: string, missing: string[], ops: string[] } -export function validateModel(currentInstance: Human | null, model: GraphModel | null, name: string): KernelOps | null { +export function validateModel(instance: Human | null, model: GraphModel | null, name: string): KernelOps | null { if (!model) return null; - if (currentInstance) instance = currentInstance; - if (!instance) log('instance not registred'); if (!instance?.config?.validateModels) return null; const simpleOps = ['const', 'placeholder', 'noop', 'pad', 'squeeze', 'add', 'sub', 'mul', 'div']; const ignoreOps = ['biasadd', 'fusedbatchnormv3', 'matmul', 'switch', 'shape', 'merge', 'split', 'broadcastto']; @@ -193,15 +67,124 @@ export function validateModel(currentInstance: Human | null, model: GraphModel | return missing.length > 0 ? 
{ name, missing, ops, url } : null; } -export function validate(currentInstance: Human): { name: string, missing: string[] }[] { - if (currentInstance) instance = currentInstance; - if (!instance) log('instance not registred'); - const missing: KernelOps[] = []; - for (const defined of Object.keys(currentInstance.models)) { - const model: GraphModel | null = currentInstance.models[defined as keyof Models] as GraphModel | null; - if (!model) continue; - const res = validateModel(currentInstance, model, defined); - if (res) missing.push(res); - } - return missing; +/** structure that holds global stats for currently loaded models */ +export interface ModelStats { + numLoadedModels: number, + numDefinedModels: number, + percentageLoaded: number, + totalSizeFromManifest: number, + totalSizeWeights: number, + totalSizeLoading: number, + modelStats: ModelInfo[], +} + +/** Models class used by Human + * - models: record of all GraphModels + * - list: returns list of configured models with their stats + * - loaded: returns array of loaded models + * - reset: unloads all models + * - validate: checks loaded models for valid kernel ops vs current backend + * - stats: live detailed model stats that can be checked during model load phase + */ +export class Models { + instance: Human; + models: Record; + + constructor(currentInstance: Human) { + this.models = {}; + this.instance = currentInstance; + } + + stats(): ModelStats { + let totalSizeFromManifest = 0; + let totalSizeWeights = 0; + let totalSizeLoading = 0; + for (const m of Object.values(modelStats)) { + totalSizeFromManifest += m.sizeFromManifest; + totalSizeWeights += m.sizeLoadedWeights; + totalSizeLoading += m.sizeDesired; + } + const percentageLoaded = totalSizeLoading > 0 ? totalSizeWeights / totalSizeLoading : 0; + return { + numLoadedModels: Object.values(modelStats).length, + numDefinedModels: Object.keys(this.models).length, + percentageLoaded, + totalSizeFromManifest, + totalSizeWeights, + totalSizeLoading, + modelStats: Object.values(modelStats), + }; + } + + reset(): void { + for (const model of Object.keys(this.models)) this.models[model] = null; + } + + async load(): Promise { + if (env.initial) this.reset(); + const m: Record> = {}; + // face main models + m.blazeface = (this.instance.config.face.enabled && !this.models.blazeface) ? blazeface.load(this.instance.config) : null; + m.antispoof = (this.instance.config.face.enabled && this.instance.config.face.antispoof?.enabled && !this.models.antispoof) ? antispoof.load(this.instance.config) : null; + m.liveness = (this.instance.config.face.enabled && this.instance.config.face.liveness?.enabled && !this.models.liveness) ? liveness.load(this.instance.config) : null; + m.faceres = (this.instance.config.face.enabled && this.instance.config.face.description?.enabled && !this.models.faceres) ? faceres.load(this.instance.config) : null; + m.emotion = (this.instance.config.face.enabled && this.instance.config.face.emotion?.enabled && !this.models.emotion) ? emotion.load(this.instance.config) : null; + m.iris = (this.instance.config.face.enabled && this.instance.config.face.iris?.enabled && !this.instance.config.face.attention?.enabled && !this.models.iris) ? iris.load(this.instance.config) : null; + m.facemesh = (this.instance.config.face.enabled && this.instance.config.face.mesh?.enabled && (!this.models.facemesh)) ? 
facemesh.load(this.instance.config) : null; + // face alternatives + m.gear = (this.instance.config.face.enabled && this.instance.config.face['gear']?.enabled && !this.models.gear) ? gear.load(this.instance.config) : null; + m.ssrnetage = (this.instance.config.face.enabled && this.instance.config.face['ssrnet']?.enabled && !this.models.ssrnetage) ? ssrnetAge.load(this.instance.config) : null; + m.ssrnetgender = (this.instance.config.face.enabled && this.instance.config.face['ssrnet']?.enabled && !this.models.ssrnetgender) ? ssrnetGender.load(this.instance.config) : null; + m.mobilefacenet = (this.instance.config.face.enabled && this.instance.config.face['mobilefacenet']?.enabled && !this.models.mobilefacenet) ? mobilefacenet.load(this.instance.config) : null; + m.insightface = (this.instance.config.face.enabled && this.instance.config.face['insightface']?.enabled && !this.models.insightface) ? insightface.load(this.instance.config) : null; + // body alterinatives + m.blazepose = (this.instance.config.body.enabled && !this.models.blazepose && this.instance.config.body.modelPath?.includes('blazepose')) ? blazepose.loadPose(this.instance.config) : null; + m.blazeposedetect = (this.instance.config.body.enabled && !this.models.blazeposedetect && this.instance.config.body['detector'] && this.instance.config.body['detector'].modelPath) ? blazepose.loadDetect(this.instance.config) : null; + m.efficientpose = (this.instance.config.body.enabled && !this.models.efficientpose && this.instance.config.body.modelPath?.includes('efficientpose')) ? efficientpose.load(this.instance.config) : null; + m.movenet = (this.instance.config.body.enabled && !this.models.movenet && this.instance.config.body.modelPath?.includes('movenet')) ? movenet.load(this.instance.config) : null; + m.posenet = (this.instance.config.body.enabled && !this.models.posenet && this.instance.config.body.modelPath?.includes('posenet')) ? posenet.load(this.instance.config) : null; + // hand alternatives + m.handtrack = (this.instance.config.hand.enabled && !this.models.handtrack && this.instance.config.hand.detector?.modelPath?.includes('handtrack')) ? handtrack.loadDetect(this.instance.config) : null; + m.handskeleton = (this.instance.config.hand.enabled && this.instance.config.hand.landmarks && !this.models.handskeleton && this.instance.config.hand.detector?.modelPath?.includes('handtrack')) ? handtrack.loadSkeleton(this.instance.config) : null; + if (this.instance.config.hand.detector?.modelPath?.includes('handdetect')) [m.handpose, m.handskeleton] = (!this.models.handpose) ? await handpose.load(this.instance.config) : [null, null]; + // object detection alternatives + m.centernet = (this.instance.config.object.enabled && !this.models.centernet && this.instance.config.object.modelPath?.includes('centernet')) ? centernet.load(this.instance.config) : null; + m.nanodet = (this.instance.config.object.enabled && !this.models.nanodet && this.instance.config.object.modelPath?.includes('nanodet')) ? nanodet.load(this.instance.config) : null; + // segmentation alternatives + m.selfie = (this.instance.config.segmentation.enabled && !this.models.selfie && this.instance.config.segmentation.modelPath?.includes('selfie')) ? selfie.load(this.instance.config) : null; + m.meet = (this.instance.config.segmentation.enabled && !this.models.meet && this.instance.config.segmentation.modelPath?.includes('meet')) ? 
meet.load(this.instance.config) : null; + m.rvm = (this.instance.config.segmentation.enabled && !this.models.rvm && this.instance.config.segmentation.modelPath?.includes('rvm')) ? rvm.load(this.instance.config) : null; + + // models are loaded in parallel asynchronously so lets wait until they are actually loaded + await Promise.all([...Object.values(m)]); + for (const model of Object.keys(m)) this.models[model] = m[model] as GraphModel || this.models[model] || null; // only update actually loaded models + } + + list() { + const models = Object.keys(this.models).map((model) => ({ name: model, loaded: (this.models[model] !== null), size: 0, url: this.models[model] ? this.models[model]?.['modelUrl'] : null })); + for (const m of models) { + const stats = Object.keys(modelStats).find((s) => s.startsWith(m.name)); + if (!stats) continue; + m.size = modelStats[stats].sizeLoadedWeights; + m.url = modelStats[stats].url; + } + return models; + } + + loaded() { + const list = this.list(); + const loaded = list.filter((model) => model.loaded).map((model) => model.name); + return loaded; + } + + validate(): { name: string, missing: string[] }[] { + const missing: KernelOps[] = []; + for (const defined of Object.keys(this.models)) { + const model: GraphModel | null = this.models[defined as keyof Models]; + if (!model) continue; + const res = validateModel(this.instance, model, defined); + if (res) missing.push(res); + } + return missing; + } } diff --git a/src/tfjs/humangl.ts b/src/tfjs/humangl.ts index 5dcbb722..5e5e024e 100644 --- a/src/tfjs/humangl.ts +++ b/src/tfjs/humangl.ts @@ -4,7 +4,6 @@ import * as tf from 'dist/tfjs.esm.js'; import type { Human } from '../human'; import { log } from '../util/util'; import * as image from '../image/image'; -import * as models from '../models'; import type { AnyCanvas } from '../exports'; export const config = { @@ -46,7 +45,7 @@ export function register(instance: Human): void { if (instance.config.backend !== 'humangl') return; if ((config.name in tf.engine().registry) && !config?.gl?.getParameter(config.gl.VERSION)) { log('humangl error: backend invalid context'); - models.reset(instance); + instance.models.reset(); /* log('resetting humangl backend'); await tf.removeBackend(config.name); diff --git a/src/tfjs/load.ts b/src/tfjs/load.ts index 3772afce..ab9cf993 100644 --- a/src/tfjs/load.ts +++ b/src/tfjs/load.ts @@ -18,6 +18,7 @@ export interface ModelInfo { sizeDesired: number, sizeFromManifest: number, sizeLoadedWeights: number, + url: string, } export const modelStats: Record = {}; @@ -45,6 +46,7 @@ export async function loadModel(modelPath: string | undefined): Promise httpHandler(url, init) }; - let model: GraphModel = new tf.GraphModel(modelStats[shortModelName].inCache ? 
cachedModelName : modelUrl, tfLoadOptions) as unknown as GraphModel; // create model prototype and decide if load from cache or from original modelurl + let model: GraphModel = new tf.GraphModel(modelStats[shortModelName].url, tfLoadOptions) as unknown as GraphModel; // create model prototype and decide if load from cache or from original modelurl let loaded = false; try { // @ts-ignore private function diff --git a/src/warmup.ts b/src/warmup.ts index ec0490ab..69f61fd2 100644 --- a/src/warmup.ts +++ b/src/warmup.ts @@ -8,9 +8,9 @@ import * as sample from './sample'; import * as image from './image/image'; import * as backend from './tfjs/backend'; import { env } from './util/env'; -import type { Config } from './config'; import { emptyResult, Result } from './result'; -import { Human, models } from './human'; +import type { Config } from './config'; +import type { Human } from './human'; import type { Tensor, DataType } from './tfjs/types'; async function warmupBitmap(instance: Human): Promise { @@ -161,7 +161,7 @@ export async function warmup(instance: Human, userConfig?: Partial): Pro return emptyResult(); } return new Promise(async (resolve) => { - await models.load(instance); + await instance.models.load(); await runCompile(instance); const res = await runInference(instance); const t1 = now(); diff --git a/test/build.log b/test/build.log index 257dd75b..65ca6407 100644 --- a/test/build.log +++ b/test/build.log @@ -1,105 +1,56 @@ -2022-11-17 10:11:23 DATA:  Build {"name":"@vladmandic/human","version":"3.0.0"} -2022-11-17 10:11:23 INFO:  Application: {"name":"@vladmandic/human","version":"3.0.0"} -2022-11-17 10:11:23 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true} -2022-11-17 10:11:23 INFO:  Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"} -2022-11-17 10:11:23 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]} -2022-11-17 10:11:23 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924} -2022-11-17 10:11:23 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":673583,"outputBytes":317615} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928} -2022-11-17 10:11:23 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":673587,"outputBytes":317619} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876} -2022-11-17 10:11:23 STATE: Compile: 
{"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674535,"outputBytes":317730} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670} -2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673329,"outputBytes":316181} -2022-11-17 10:11:23 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900} -2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1817559,"outputBytes":1457643} -2022-11-17 10:11:23 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1817559,"outputBytes":1917209} -2022-11-17 10:11:27 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15} -2022-11-17 10:11:29 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true} -2022-11-17 10:11:29 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":2914} -2022-11-17 10:11:29 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17174,"outputBytes":9251} -2022-11-17 10:11:37 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":114,"errors":0,"warnings":0} -2022-11-17 10:11:37 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"} -2022-11-17 10:11:37 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"} -2022-11-17 10:11:37 INFO:  Done... 
-2022-11-17 10:11:37 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"} -2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"} -2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"} -2022-11-17 10:11:37 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"} -2022-11-17 10:11:37 STATE: Filter: {"input":"types/tfjs-core.d.ts"} -2022-11-17 10:11:38 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195} -2022-11-17 10:11:38 STATE: Filter: {"input":"types/human.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.esm.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node-gpu.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node.d.ts"} -2022-11-17 10:11:38 STATE: Write: {"output":"dist/human.node-wasm.d.ts"} -2022-11-17 10:11:38 INFO:  Analyze models: {"folders":8,"result":"models/models.json"} -2022-11-17 10:11:38 STATE: Models {"folder":"./models","models":12} -2022-11-17 10:11:38 STATE: Models {"folder":"../human-models/models","models":43} -2022-11-17 10:11:38 STATE: Models {"folder":"../blazepose/model/","models":4} -2022-11-17 10:11:38 STATE: Models {"folder":"../anti-spoofing/model","models":1} -2022-11-17 10:11:38 STATE: Models {"folder":"../efficientpose/models","models":3} -2022-11-17 10:11:38 STATE: Models {"folder":"../insightface/models","models":5} -2022-11-17 10:11:38 STATE: Models {"folder":"../movenet/models","models":3} -2022-11-17 10:11:38 STATE: Models {"folder":"../nanodet/models","models":4} -2022-11-17 10:11:39 STATE: Models: {"count":58,"totalSize":386543911} -2022-11-17 10:11:39 INFO:  Human Build complete... 
{"logFile":"test/build.log"} -2022-11-17 10:16:08 INFO:  @vladmandic/human version 3.0.0 -2022-11-17 10:16:08 INFO:  User: vlado Platform: linux Arch: x64 Node: v19.1.0 -2022-11-17 10:16:08 INFO:  Application: {"name":"@vladmandic/human","version":"3.0.0"} -2022-11-17 10:16:08 INFO:  Environment: {"profile":"development","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true} -2022-11-17 10:16:08 INFO:  Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"} -2022-11-17 10:16:08 INFO:  Build: {"profile":"development","steps":["serve","watch","compile"]} -2022-11-17 10:16:08 STATE: WebServer: {"ssl":false,"port":8000,"root":"."} -2022-11-17 10:16:08 STATE: WebServer: {"ssl":true,"port":8001,"root":".","sslKey":"node_modules/@vladmandic/build/cert/https.key","sslCrt":"node_modules/@vladmandic/build/cert/https.crt"} -2022-11-17 10:16:08 STATE: Watch: {"locations":["src/**/*","tfjs/**/*","demo/**/*.ts"]} -2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":1357} -2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1565,"outputBytes":1786} -2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":674445,"outputBytes":507569} -2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1573,"outputBytes":1810} -2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":674469,"outputBytes":507589} -2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1661,"outputBytes":1992} -2022-11-17 10:16:08 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674651,"outputBytes":507780} -2022-11-17 10:16:08 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":2371,"outputBytes":923} -2022-11-17 10:16:08 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673582,"outputBytes":510177} -2022-11-17 10:16:09 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":2371,"outputBytes":1144900} -2022-11-17 10:16:09 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1817559,"outputBytes":1457643} -2022-11-17 10:16:09 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1817559,"outputBytes":1917209} -2022-11-17 10:16:09 STATE: 
Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6136,"outputBytes":4208} -2022-11-17 10:16:09 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17567,"outputBytes":13914} -2022-11-17 10:16:09 INFO:  Listening... -2022-11-17 10:16:20 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/typescript","redirect":"/demo/typescript/index.html","remote":"::1"} -2022-11-17 10:16:20 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":1953,"url":"/demo/typescript/index.html","remote":"::1"} -2022-11-17 10:16:20 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":4208,"url":"/demo/typescript/index.js","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/faceid","redirect":"/demo/faceid/index.html","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":3415,"url":"/demo/faceid/index.html","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":13914,"url":"/demo/faceid/index.js","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"} -2022-11-17 10:16:22 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"} -2022-11-17 10:16:23 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"} -2022-11-17 10:16:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":28470,"url":"/demo/faceid/index.js.map","remote":"::1"} -2022-11-17 10:16:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":3415,"url":"/demo/faceid/index.html","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":13914,"url":"/demo/faceid/index.js","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":28470,"url":"/demo/faceid/index.js.map","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"} -2022-11-17 10:16:39 DATA:  
HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/x-icon","size":261950,"url":"/favicon.ico","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"} -2022-11-17 10:16:39 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":301,"url":"/demo/typescript","redirect":"/demo/typescript/index.html","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/html","size":1953,"url":"/demo/typescript/index.html","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":4208,"url":"/demo/typescript/index.js","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"font/woff2","size":181500,"url":"/assets/lato-light.woff2","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"text/javascript","size":1917209,"url":"/dist/human.esm.js","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":9470,"url":"/demo/typescript/index.js.map","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/octet-stream","size":3385692,"url":"/dist/human.esm.js.map","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"application/manifest+json","size":304,"url":"/demo/manifest.webmanifest","remote":"::1"} -2022-11-17 10:17:25 DATA:  HTTPS: {"method":"GET","ver":"2.0","status":200,"mime":"image/png","size":142790,"url":"/assets/icon.png","remote":"::1"} +2022-11-17 14:37:08 DATA:  Build {"name":"@vladmandic/human","version":"3.0.0"} +2022-11-17 14:37:08 INFO:  Application: {"name":"@vladmandic/human","version":"3.0.0"} +2022-11-17 14:37:08 INFO:  Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true} +2022-11-17 14:37:08 INFO:  Toolchain: {"build":"0.7.14","esbuild":"0.15.14","typescript":"4.9.3","typedoc":"0.23.21","eslint":"8.27.0"} +2022-11-17 14:37:08 INFO:  Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]} +2022-11-17 14:37:08 STATE: Clean: {"locations":["dist/*","types/*","typedoc/*"]} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924} +2022-11-17 14:37:08 STATE: Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":670179,"outputBytes":317460} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928} +2022-11-17 14:37:08 STATE: Compile: 
{"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":670183,"outputBytes":317464} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876} +2022-11-17 14:37:08 STATE: Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":671131,"outputBytes":317575} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670} +2022-11-17 14:37:08 STATE: Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":669925,"outputBytes":316039} +2022-11-17 14:37:08 STATE: Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1144900} +2022-11-17 14:37:08 STATE: Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1814155,"outputBytes":1457353} +2022-11-17 14:37:09 STATE: Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1814155,"outputBytes":1914737} +2022-11-17 14:37:12 STATE: Typings: {"input":"src/human.ts","output":"types/lib","files":15} +2022-11-17 14:37:14 STATE: TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":77,"generated":true} +2022-11-17 14:37:14 STATE: Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6135,"outputBytes":2913} +2022-11-17 14:37:14 STATE: Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17572,"outputBytes":9456} +2022-11-17 14:37:22 STATE: Lint: {"locations":["*.json","src/**/*.ts","test/**/*.js","demo/**/*.js"],"files":114,"errors":0,"warnings":1} +2022-11-17 14:37:22 WARN:  +/home/vlado/dev/human/src/human.ts + 42:17 warning 'DrawOptions' is defined but never used @typescript-eslint/no-unused-vars + +✖ 1 problem (0 errors, 1 warning) + +2022-11-17 14:37:22 STATE: ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"} +2022-11-17 14:37:22 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"} +2022-11-17 14:37:22 INFO:  Done... 
+2022-11-17 14:37:22 STATE: Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"} +2022-11-17 14:37:22 STATE: Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"} +2022-11-17 14:37:22 STATE: Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"} +2022-11-17 14:37:22 STATE: Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"} +2022-11-17 14:37:22 STATE: Filter: {"input":"types/tfjs-core.d.ts"} +2022-11-17 14:37:23 STATE: API-Extractor: {"succeeeded":true,"errors":0,"warnings":195} +2022-11-17 14:37:23 STATE: Filter: {"input":"types/human.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.esm-nobundle.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.esm.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.node-gpu.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.node.d.ts"} +2022-11-17 14:37:23 STATE: Write: {"output":"dist/human.node-wasm.d.ts"} +2022-11-17 14:37:23 INFO:  Analyze models: {"folders":8,"result":"models/models.json"} +2022-11-17 14:37:23 STATE: Models {"folder":"./models","models":12} +2022-11-17 14:37:23 STATE: Models {"folder":"../human-models/models","models":43} +2022-11-17 14:37:23 STATE: Models {"folder":"../blazepose/model/","models":4} +2022-11-17 14:37:23 STATE: Models {"folder":"../anti-spoofing/model","models":1} +2022-11-17 14:37:23 STATE: Models {"folder":"../efficientpose/models","models":3} +2022-11-17 14:37:23 STATE: Models {"folder":"../insightface/models","models":5} +2022-11-17 14:37:23 STATE: Models {"folder":"../movenet/models","models":3} +2022-11-17 14:37:23 STATE: Models {"folder":"../nanodet/models","models":4} +2022-11-17 14:37:24 STATE: Models: {"count":58,"totalSize":386543911} +2022-11-17 14:37:24 INFO:  Human Build complete... 
{"logFile":"test/build.log"} diff --git a/test/test-browser-esm.js b/test/test-browser-esm.js index 80e6414d..a5ec5ea2 100644 --- a/test/test-browser-esm.js +++ b/test/test-browser-esm.js @@ -75,7 +75,7 @@ async function testDefault(title, testConfig = {}) { await human.load(); const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null) })); log(' models', models); - const ops = await human.check(); + const ops = await human.models.validate(); if (ops && ops.length > 0) log(' missing ops', ops); const img = await image('../../samples/in/ai-body.jpg'); const input = await human.image(img); // process image @@ -108,7 +108,7 @@ async function testMatch() { const similarity = await human.similarity(desc1, desc2); const descArray = []; for (let i = 0; i < 100; i++) descArray.push(desc2); - const match = await human.match(desc1, descArray); + const match = await human.match.find(desc1, descArray); log(`test similarity/${human.tf.getBackend()}`, match, similarity); } diff --git a/test/test-node-load.js b/test/test-node-load.js index a59f06fb..f487475b 100644 --- a/test/test-node-load.js +++ b/test/test-node-load.js @@ -9,22 +9,21 @@ const log = (status, ...data) => { async function main() { const human = new Human.Human(); // create instance of human using default configuration const startTime = new Date(); - log('info', 'load start', { human: human.version, tf: tf.version_core, progress: human.getModelStats().percentageLoaded }); + log('info', 'load start', { human: human.version, tf: tf.version_core, progress: human.models.stats().percentageLoaded }); async function monitor() { - const progress = human.getModelStats().percentageLoaded; + const progress = human.models.stats().percentageLoaded; log('data', 'load interval', { elapsed: new Date() - startTime, progress }); if (progress < 1) setTimeout(monitor, 10); } monitor(); - // setInterval(() => log('interval', { elapsed: new Date() - startTime, progress: human.getModelStats().percentageLoaded })); const loadPromise = human.load(); loadPromise - .then(() => log('state', 'passed', { progress: human.getModelStats().percentageLoaded })) + .then(() => log('state', 'passed', { progress: human.models.stats().percentageLoaded })) .catch(() => log('error', 'load promise')); await loadPromise; - log('info', 'load final', { progress: human.getModelStats().percentageLoaded }); + log('info', 'load final', { progress: human.models.stats().percentageLoaded }); await human.warmup(); // optional as model warmup is performed on-demand first time its executed } diff --git a/test/test-node-main.js b/test/test-node-main.js index cfbceaaa..6f718311 100644 --- a/test/test-node-main.js +++ b/test/test-node-main.js @@ -138,7 +138,7 @@ async function testDetect(human, input, title, checkLeak = true) { lastOp = `testDetect ${title}`; log('state', 'start', title); await human.load(config); - const missing = human.check(); + const missing = human.models.validate(); for (const op of missing) log('warn', 'missing kernel ops', { title, model: op.name, url: op.url, missing: op.missing, backkend: human.tf.getBackend() }); const tensors = human.tf.engine().state.numTensors; const image = input ? 
await getImage(human, input) : human.tf.randomNormal([1, 1024, 1024, 3]); @@ -189,7 +189,7 @@ async function verifyDetails(human) { verify(res.face.length === 1, 'details face length', res.face.length); for (const face of res.face) { verify(face.score > 0.9 && face.boxScore > 0.9 && face.faceScore > 0.9, 'details face score', face.score, face.boxScore, face.faceScore); - verify(face.age > 23 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.iris > 0.5 && face.distance < 2.5, 'details face age/gender', face.age, face.gender, face.genderScore, face.distance); + verify(face.age > 23 && face.age < 30 && face.gender === 'female' && face.genderScore > 0.9 && face.distance > 0.5 && face.distance < 2.5, 'details face age/gender', face.age, face.gender, face.genderScore, face.distance); verify(face.box.length === 4 && face.boxRaw.length === 4 && face.mesh.length === 478 && face.meshRaw.length === 478 && face.embedding.length === 1024, 'details face arrays', face.box.length, face.mesh.length, face.embedding.length); verify(face.emotion.length >= 2 && face.emotion[0].score > 0.30 && face.emotion[0].emotion === 'angry', 'details face emotion', face.emotion.length, face.emotion[0]); verify(face.real > 0.55, 'details face anti-spoofing', face.real); @@ -293,9 +293,9 @@ async function test(Human, inputConfig) { // test model loading log('info', 'test: model load'); await human.load(); - const models = Object.keys(human.models).map((model) => ({ name: model, loaded: (human.models[model] !== null), url: human.models[model] ? human.models[model]['modelUrl'] : null })); - const loaded = models.filter((model) => model.loaded); - if (models.length === 25 && loaded.length === 11) log('state', 'passed: models loaded', models.length, loaded.length, models); + const models = human.models.list(); + const loaded = human.models.loaded(); + if (models.length === 24 && loaded.length === 11) log('state', 'passed: models loaded', models.length, loaded.length, models); else log('error', 'failed: models loaded', models.length, loaded.length, models); log('info', 'memory:', { memory: human.tf.memory() }); log('info', 'state:', { state: human.tf.engine().state }); @@ -380,15 +380,15 @@ async function test(Human, inputConfig) { const desc3 = res3 && res3.face && res3.face[0] && res3.face[0].embedding ? 
[...res3.face[0].embedding] : null; if (!desc1 || !desc2 || !desc3 || desc1.length !== 1024 || desc2.length !== 1024 || desc3.length !== 1024) log('error', 'failed: face descriptor', desc1?.length, desc2?.length, desc3?.length); else log('state', 'passed: face descriptor'); - res1 = human.similarity(desc1, desc1); - res2 = human.similarity(desc1, desc2); - res3 = human.similarity(desc1, desc3); + res1 = human.match.similarity(desc1, desc1); + res2 = human.match.similarity(desc1, desc2); + res3 = human.match.similarity(desc1, desc3); if (res1 < 1 || res2 < 0.40 || res3 < 0.40 || res2 > 0.75 || res3 > 0.75) log('error', 'failed: face similarity', { similarity: [res1, res2, res3], descriptors: [desc1?.length, desc2?.length, desc3?.length] }); else log('state', 'passed: face similarity', { similarity: [res1, res2, res3], descriptors: [desc1?.length, desc2?.length, desc3?.length] }); // test object detection log('info', 'test object'); - config.object = { enabled: true, modelPath: 'mb3-centernet.json' }; + config.object = { enabled: true, modelPath: 'centernet.json' }; res = await testDetect(human, 'samples/in/ai-body.jpg', 'object'); if (!res || res.object?.length < 1 || res.object[0]?.label !== 'person') log('error', 'failed: centernet', res.object); else log('state', 'passed: centernet'); @@ -461,9 +461,9 @@ async function test(Human, inputConfig) { const arr = db.map((rec) => rec.embedding); if (db.length < 20) log('error', 'failed: face database ', db.length); else log('state', 'passed: face database', db.length); - res1 = human.match(desc1, arr); - res2 = human.match(desc2, arr); - res3 = human.match(desc3, arr); + res1 = human.match.find(desc1, arr); + res2 = human.match.find(desc2, arr); + res3 = human.match.find(desc3, arr); if (res1.index !== 4 || res2.index !== 4 || res3.index !== 4) log('error', 'failed: face match', res1, res2, res3); else log('state', 'passed: face match', { first: { index: res1.index, similarity: res1.similarity } }, { second: { index: res2.index, similarity: res2.similarity } }, { third: { index: res3.index, similarity: res3.similarity } }); diff --git a/test/test.log b/test/test.log index 0962b59f..34f79111 100644 --- a/test/test.log +++ b/test/test.log @@ -1,658 +1,657 @@ -2022-11-16 17:45:10 INFO:  @vladmandic/human version 3.0.0 -2022-11-16 17:45:10 INFO:  User: vlado Platform: linux Arch: x64 Node: v19.1.0 -2022-11-16 17:45:10 INFO:  demos: [{"cmd":"../demo/nodejs/node.js","args":[]},{"cmd":"../demo/nodejs/node-simple.js","args":[]},{"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]},{"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]},{"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]},{"cmd":"../demo/nodejs/process-folder.js","args":["samples"]},{"cmd":"../demo/multithread/node-multiprocess.js","args":[]},{"cmd":"../demo/facematch/node-match.js","args":[]}] -2022-11-16 17:45:10 INFO:  {"cmd":"../demo/nodejs/node.js","args":[]} start -2022-11-16 17:45:11 INFO:  {"cmd":"../demo/nodejs/node-simple.js","args":[]} start -2022-11-16 17:45:12 INFO:  {"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]} start -2022-11-16 17:45:13 INFO:  {"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]} start -2022-11-16 17:45:13 INFO:  {"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]} start -2022-11-16 17:45:14 INFO:  
{"cmd":"../demo/nodejs/process-folder.js","args":["samples"]} start -2022-11-16 17:45:15 INFO:  {"cmd":"../demo/multithread/node-multiprocess.js","args":[]} start -2022-11-16 17:45:27 INFO:  {"cmd":"../demo/facematch/node-match.js","args":[]} start -2022-11-16 17:45:28 INFO:  tests: ["test-node-load.js","test-node-gear.js","test-backend-node.js","test-backend-node-gpu.js","test-backend-node-wasm.js"] -2022-11-16 17:45:28 INFO:  -2022-11-16 17:45:28 INFO:  test-node-load.js start -2022-11-16 17:45:28 INFO:  test-node-load.js load start {"human":"3.0.0","tf":"4.0.0","progress":0} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":1,"progress":0} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":12,"progress":0} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":24,"progress":0.03222546277199007} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":35,"progress":0.2135162934143239} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":61,"progress":0.3299591712723044} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":79,"progress":0.7259096583739463} -2022-11-16 17:45:28 STATE: test-node-load.js passed {"progress":1} -2022-11-16 17:45:28 INFO:  test-node-load.js load final {"progress":1} -2022-11-16 17:45:28 DATA:  test-node-load.js load interval {"elapsed":426,"progress":1} -2022-11-16 17:45:28 INFO:  -2022-11-16 17:45:28 INFO:  test-node-gear.js start -2022-11-16 17:45:28 DATA:  test-node-gear.js input: ["samples/in/ai-face.jpg"] -2022-11-16 17:45:29 STATE: test-node-gear.js passed: gear faceres samples/in/ai-face.jpg -2022-11-16 17:45:29 DATA:  test-node-gear.js results {"face":0,"model":"faceres","image":"samples/in/ai-face.jpg","age":23.5,"gender":"female","genderScore":0.92} -2022-11-16 17:45:29 STATE: test-node-gear.js passed: gear gear samples/in/ai-face.jpg -2022-11-16 17:45:29 DATA:  test-node-gear.js results {"face":0,"model":"gear","image":"samples/in/ai-face.jpg","age":23.3,"gender":"female","genderScore":0.51,"race":[{"score":0.93,"race":"white"}]} -2022-11-16 17:45:30 STATE: test-node-gear.js passed: gear ssrnet samples/in/ai-face.jpg -2022-11-16 17:45:30 DATA:  test-node-gear.js results {"face":0,"model":"ssrnet","image":"samples/in/ai-face.jpg","age":23.4,"gender":"female","genderScore":0.99} -2022-11-16 17:45:30 INFO:  -2022-11-16 17:45:30 INFO:  test-backend-node.js start -2022-11-16 17:45:30 INFO:  test-backend-node.js test: configuration validation -2022-11-16 17:45:30 STATE: test-backend-node.js passed: configuration default validation [] -2022-11-16 17:45:30 STATE: test-backend-node.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] -2022-11-16 17:45:30 INFO:  test-backend-node.js test: model load -2022-11-16 17:45:30 STATE: test-backend-node.js passed: models loaded 25 11 
[{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"file://models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"file://models/emotion.json"},{"name":"facedetect","loaded":true,"url":"file://models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"url":"file://models/facemesh.json"},{"name":"faceres","loaded":true,"url":"file://models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"file://models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"file://models/handtrack.json"},{"name":"liveness","loaded":true,"url":"file://models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"file://models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"file://models/antispoof.json"}] -2022-11-16 17:45:30 INFO:  test-backend-node.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} -2022-11-16 17:45:30 INFO:  test-backend-node.js state: {"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} -2022-11-16 17:45:30 INFO:  test-backend-node.js test: warmup -2022-11-16 17:45:30 STATE: test-backend-node.js passed: create human -2022-11-16 17:45:30 INFO:  test-backend-node.js human version: 3.0.0 -2022-11-16 17:45:30 INFO:  test-backend-node.js platform: linux x64 agent: NodeJS v19.1.0 -2022-11-16 17:45:30 INFO:  test-backend-node.js tfjs version: 4.0.0 -2022-11-16 17:45:30 INFO:  test-backend-node.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":false},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169} -2022-11-16 17:45:30 STATE: test-backend-node.js passed: set backend: tensorflow -2022-11-16 17:45:30 STATE: test-backend-node.js tensors 1785 -2022-11-16 17:45:30 STATE: test-backend-node.js passed: load models -2022-11-16 17:45:30 STATE: test-backend-node.js result: defined models: 25 loaded models: 11 -2022-11-16 17:45:30 STATE: test-backend-node.js passed: warmup: none default -2022-11-16 17:45:30 DATA:  test-backend-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} -2022-11-16 17:45:30 DATA:  test-backend-node.js result: performance: load: null total: null -2022-11-16 17:45:30 STATE: test-backend-node.js passed: warmup none result match -2022-11-16 17:45:30 STATE: 
test-backend-node.js event: image -2022-11-16 17:45:30 STATE: test-backend-node.js event: detect -2022-11-16 17:45:30 STATE: test-backend-node.js event: warmup -2022-11-16 17:45:30 STATE: test-backend-node.js passed: warmup: face default -2022-11-16 17:45:30 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4} -2022-11-16 17:45:30 DATA:  test-backend-node.js result: performance: load: null total: 421 -2022-11-16 17:45:30 STATE: test-backend-node.js passed: warmup face result match -2022-11-16 17:45:30 STATE: test-backend-node.js event: image -2022-11-16 17:45:31 STATE: test-backend-node.js event: detect -2022-11-16 17:45:31 STATE: test-backend-node.js event: warmup -2022-11-16 17:45:31 STATE: test-backend-node.js passed: warmup: body default -2022-11-16 17:45:31 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:31 DATA:  test-backend-node.js result: performance: load: null total: 317 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: warmup body result match -2022-11-16 17:45:31 STATE: test-backend-node.js details: {"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} -2022-11-16 17:45:31 INFO:  test-backend-node.js test: details verification -2022-11-16 17:45:31 STATE: test-backend-node.js start default -2022-11-16 17:45:31 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:31 STATE: test-backend-node.js event: image -2022-11-16 17:45:31 STATE: test-backend-node.js event: detect -2022-11-16 17:45:31 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg default -2022-11-16 17:45:31 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:31 DATA:  test-backend-node.js result: performance: load: null total: 313 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face length 1 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face score 1 0.93 1 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face age/gender 23.7 female 0.97 2.34 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face arrays 4 478 1024 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face anti-spoofing 0.79 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details face liveness 0.83 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details body length 1 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details body 0.92 17 6 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details hand length 1 -2022-11-16 
17:45:31 STATE: test-backend-node.js passed: details hand 0.51 0.73 point -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details hand arrays 21 5 7 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details gesture length 7 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details gesture first {"face":0,"gesture":"facing right"} -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details object length 1 -2022-11-16 17:45:31 STATE: test-backend-node.js passed: details object 0.72 person -2022-11-16 17:45:31 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928} -2022-11-16 17:45:31 STATE: test-backend-node.js event: image -2022-11-16 17:45:32 STATE: test-backend-node.js event: detect -2022-11-16 17:45:32 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: float32 -2022-11-16 17:45:32 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928} -2022-11-16 17:45:32 STATE: test-backend-node.js event: image -2022-11-16 17:45:32 STATE: test-backend-node.js event: detect -2022-11-16 17:45:32 STATE: test-backend-node.js passed: tensor shape: [1200,1200,4] dtype: float32 -2022-11-16 17:45:32 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:32 STATE: test-backend-node.js event: image -2022-11-16 17:45:33 STATE: test-backend-node.js event: detect -2022-11-16 17:45:33 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,3] dtype: float32 -2022-11-16 17:45:33 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:33 STATE: test-backend-node.js event: image -2022-11-16 17:45:33 STATE: test-backend-node.js event: detect -2022-11-16 17:45:33 STATE: test-backend-node.js passed: tensor shape: [1200,1200,3] dtype: float32 -2022-11-16 17:45:33 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} -2022-11-16 17:45:33 STATE: test-backend-node.js event: image -2022-11-16 17:45:34 STATE: test-backend-node.js event: detect -2022-11-16 17:45:34 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: int32 -2022-11-16 17:45:34 INFO:  test-backend-node.js test default -2022-11-16 17:45:34 STATE: test-backend-node.js start async -2022-11-16 17:45:34 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:34 STATE: test-backend-node.js event: image -2022-11-16 17:45:34 STATE: test-backend-node.js event: detect -2022-11-16 17:45:34 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg async -2022-11-16 17:45:34 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:34 DATA:  test-backend-node.js result: performance: load: null total: 314 -2022-11-16 17:45:34 STATE: test-backend-node.js passed: default result face match 1 female 0.97 -2022-11-16 17:45:34 INFO:  test-backend-node.js test sync -2022-11-16 17:45:34 STATE: test-backend-node.js start sync -2022-11-16 17:45:34 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:34 STATE: test-backend-node.js event: image -2022-11-16 17:45:35 STATE: test-backend-node.js event: 
detect -2022-11-16 17:45:35 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg sync -2022-11-16 17:45:35 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:35 DATA:  test-backend-node.js result: performance: load: null total: 295 -2022-11-16 17:45:35 STATE: test-backend-node.js passed: default sync 1 female 0.97 -2022-11-16 17:45:35 INFO:  test-backend-node.js test: image process -2022-11-16 17:45:35 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:35 STATE: test-backend-node.js passed: image input null [1,256,256,3] -2022-11-16 17:45:35 INFO:  test-backend-node.js test: image null -2022-11-16 17:45:35 STATE: test-backend-node.js passed: invalid input could not convert input to tensor -2022-11-16 17:45:35 INFO:  test-backend-node.js test face similarity -2022-11-16 17:45:35 STATE: test-backend-node.js start face similarity -2022-11-16 17:45:35 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:35 STATE: test-backend-node.js event: image -2022-11-16 17:45:35 STATE: test-backend-node.js event: detect -2022-11-16 17:45:35 STATE: test-backend-node.js passed: detect: samples/in/ai-face.jpg face similarity -2022-11-16 17:45:35 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} -2022-11-16 17:45:35 DATA:  test-backend-node.js result: performance: load: null total: 284 -2022-11-16 17:45:35 STATE: test-backend-node.js start face similarity -2022-11-16 17:45:35 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:35 STATE: test-backend-node.js event: image -2022-11-16 17:45:35 STATE: test-backend-node.js event: detect -2022-11-16 17:45:35 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg face similarity -2022-11-16 17:45:35 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:35 DATA:  test-backend-node.js result: performance: load: null total: 292 -2022-11-16 17:45:35 STATE: test-backend-node.js start face similarity -2022-11-16 17:45:35 STATE: test-backend-node.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289024} -2022-11-16 17:45:35 STATE: test-backend-node.js event: image -2022-11-16 17:45:36 STATE: test-backend-node.js event: detect -2022-11-16 17:45:36 STATE: test-backend-node.js passed: detect: samples/in/ai-upper.jpg face similarity -2022-11-16 17:45:36 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} -2022-11-16 17:45:36 DATA:  test-backend-node.js result: performance: load: null total: 267 -2022-11-16 17:45:36 STATE: test-backend-node.js passed: face descriptor -2022-11-16 17:45:36 STATE: test-backend-node.js passed: face similarity {"similarity":[1,0.44727441595492046,0.556793560189727],"descriptors":[1024,1024,1024]} -2022-11-16 17:45:36 INFO:  test-backend-node.js test object -2022-11-16 17:45:36 STATE: 
test-backend-node.js start object -2022-11-16 17:45:36 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:36 STATE: test-backend-node.js event: image -2022-11-16 17:45:36 STATE: test-backend-node.js event: detect -2022-11-16 17:45:36 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:45:36 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:36 DATA:  test-backend-node.js result: performance: load: null total: 291 -2022-11-16 17:45:36 STATE: test-backend-node.js passed: centernet -2022-11-16 17:45:36 STATE: test-backend-node.js start object -2022-11-16 17:45:37 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:37 STATE: test-backend-node.js event: image -2022-11-16 17:45:37 STATE: test-backend-node.js event: detect -2022-11-16 17:45:37 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:45:37 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:37 DATA:  test-backend-node.js result: performance: load: null total: 295 -2022-11-16 17:45:37 STATE: test-backend-node.js passed: nanodet -2022-11-16 17:45:37 INFO:  test-backend-node.js test sensitive -2022-11-16 17:45:37 STATE: test-backend-node.js start sensitive -2022-11-16 17:45:38 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:38 STATE: test-backend-node.js event: image -2022-11-16 17:45:38 STATE: test-backend-node.js event: detect -2022-11-16 17:45:38 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg sensitive -2022-11-16 17:45:38 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:45:38 DATA:  test-backend-node.js result: performance: load: null total: 263 -2022-11-16 17:45:38 STATE: test-backend-node.js passed: sensitive result match -2022-11-16 17:45:38 STATE: test-backend-node.js passed: sensitive face result match -2022-11-16 17:45:38 STATE: test-backend-node.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}] -2022-11-16 17:45:38 STATE: test-backend-node.js passed: sensitive body result match -2022-11-16 17:45:38 STATE: test-backend-node.js passed: sensitive hand result match -2022-11-16 17:45:38 INFO:  test-backend-node.js test body -2022-11-16 17:45:38 STATE: test-backend-node.js start blazepose -2022-11-16 17:45:40 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:40 STATE: test-backend-node.js event: image -2022-11-16 17:45:40 STATE: test-backend-node.js event: detect -2022-11-16 17:45:40 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg blazepose -2022-11-16 17:45:40 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39} -2022-11-16 17:45:40 DATA:  test-backend-node.js result: performance: load: null total: 338 
-2022-11-16 17:45:40 STATE: test-backend-node.js passed: blazepose -2022-11-16 17:45:40 STATE: test-backend-node.js start efficientpose -2022-11-16 17:45:41 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:41 STATE: test-backend-node.js event: image -2022-11-16 17:45:41 STATE: test-backend-node.js event: detect -2022-11-16 17:45:41 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg efficientpose -2022-11-16 17:45:41 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13} -2022-11-16 17:45:41 DATA:  test-backend-node.js result: performance: load: null total: 307 -2022-11-16 17:45:41 STATE: test-backend-node.js passed: efficientpose -2022-11-16 17:45:41 STATE: test-backend-node.js start posenet -2022-11-16 17:45:41 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:41 STATE: test-backend-node.js event: image -2022-11-16 17:45:42 STATE: test-backend-node.js event: detect -2022-11-16 17:45:42 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg posenet -2022-11-16 17:45:42 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16} -2022-11-16 17:45:42 DATA:  test-backend-node.js result: performance: load: null total: 254 -2022-11-16 17:45:42 STATE: test-backend-node.js passed: posenet -2022-11-16 17:45:42 STATE: test-backend-node.js start movenet -2022-11-16 17:45:42 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} -2022-11-16 17:45:42 STATE: test-backend-node.js event: image -2022-11-16 17:45:42 STATE: test-backend-node.js event: detect -2022-11-16 17:45:42 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg movenet -2022-11-16 17:45:42 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:45:42 DATA:  test-backend-node.js result: performance: load: null total: 252 -2022-11-16 17:45:42 STATE: test-backend-node.js passed: movenet -2022-11-16 17:45:42 INFO:  test-backend-node.js test face matching -2022-11-16 17:45:42 STATE: test-backend-node.js passed: face database 40 -2022-11-16 17:45:42 STATE: test-backend-node.js passed: face match {"first":{"index":4,"similarity":0.7827852251220577}} {"second":{"index":4,"similarity":0.5002052057057577}} {"third":{"index":4,"similarity":0.5401588464054732}} -2022-11-16 17:45:42 INFO:  test-backend-node.js test face similarity alternative -2022-11-16 17:45:42 STATE: test-backend-node.js start face embeddings -2022-11-16 17:45:43 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:43 STATE: test-backend-node.js event: image -2022-11-16 17:45:43 ERROR: test-backend-node.js failed: testDetect face embeddings -2022-11-16 17:45:43 ERROR: test-backend-node.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at 
/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node.js:99:176887"," at new Promise ()"," at q5 (/home/vlado/dev/human/dist/human.node.js:99:176656)"," at k1 (/home/vlado/dev/human/dist/human.node.js:121:6792)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node.js:840:9036"]} -2022-11-16 17:45:43 INFO:  -2022-11-16 17:45:43 INFO:  test-backend-node-gpu.js start -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js test: configuration validation -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: configuration default validation [] -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js test: model load -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: models loaded 25 11 [{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"file://models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"file://models/emotion.json"},{"name":"facedetect","loaded":true,"url":"file://models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"url":"file://models/facemesh.json"},{"name":"faceres","loaded":true,"url":"file://models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"file://models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"file://models/handtrack.json"},{"name":"liveness","loaded":true,"url":"file://models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"file://models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"file://models/antispoof.json"}] -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js state: 
{"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js test: warmup -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: create human -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js human version: 3.0.0 -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js platform: linux x64 agent: NodeJS v19.1.0 -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js tfjs version: 4.0.0 -2022-11-16 17:45:44 INFO:  test-backend-node-gpu.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":true},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169} -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: set backend: tensorflow -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js tensors 1785 -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: load models -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js result: defined models: 25 loaded models: 11 -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: warmup: none default -2022-11-16 17:45:44 DATA:  test-backend-node-gpu.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} -2022-11-16 17:45:44 DATA:  test-backend-node-gpu.js result: performance: load: null total: null -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js passed: warmup none result match -2022-11-16 17:45:44 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: warmup -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: warmup: face default -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4} -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: performance: load: null total: 1862 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: warmup face result match -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: warmup -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: warmup: body default -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: performance: load: null total: 155 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: warmup body result match -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js details: 
{"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} -2022-11-16 17:45:46 INFO:  test-backend-node-gpu.js test: details verification -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js start default -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg default -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:46 DATA:  test-backend-node-gpu.js result: performance: load: null total: 169 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face length 1 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face score 1 0.93 1 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face age/gender 23.7 female 0.97 2.34 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face arrays 4 478 1024 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face anti-spoofing 0.79 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details face liveness 0.83 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details body length 1 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details body 0.92 17 6 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details hand length 1 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details hand 0.51 0.73 point -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details hand arrays 21 5 7 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details gesture length 7 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details gesture first {"face":0,"gesture":"facing right"} -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details object length 1 -2022-11-16 17:45:46 STATE: test-backend-node-gpu.js passed: details object 0.72 person -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928} -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: float32 -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928} -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: tensor shape: 
[1200,1200,4] dtype: float32 -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:47 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,3] dtype: float32 -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: tensor shape: [1200,1200,3] dtype: float32 -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: int32 -2022-11-16 17:45:48 INFO:  test-backend-node-gpu.js test default -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js start async -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg async -2022-11-16 17:45:48 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:48 DATA:  test-backend-node-gpu.js result: performance: load: null total: 149 -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js passed: default result face match 1 female 0.97 -2022-11-16 17:45:48 INFO:  test-backend-node-gpu.js test sync -2022-11-16 17:45:48 STATE: test-backend-node-gpu.js start sync -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sync -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: performance: load: null total: 145 -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: default sync 1 female 0.97 -2022-11-16 17:45:49 INFO:  test-backend-node-gpu.js test: image process -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: image input null [1,256,256,3] -2022-11-16 17:45:49 INFO:  test-backend-node-gpu.js test: image null -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: invalid input could not convert input to tensor -2022-11-16 17:45:49 INFO:  test-backend-node-gpu.js test face similarity -2022-11-16 17:45:49 STATE: 
test-backend-node-gpu.js start face similarity -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-face.jpg face similarity -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: performance: load: null total: 145 -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js start face similarity -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg face similarity -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:49 DATA:  test-backend-node-gpu.js result: performance: load: null total: 137 -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js start face similarity -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289056} -2022-11-16 17:45:49 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-upper.jpg face similarity -2022-11-16 17:45:50 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} -2022-11-16 17:45:50 DATA:  test-backend-node-gpu.js result: performance: load: null total: 149 -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: face descriptor -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: face similarity {"similarity":[1,0.4475002983522097,0.5570879556505012],"descriptors":[1024,1024,1024]} -2022-11-16 17:45:50 INFO:  test-backend-node-gpu.js test object -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js start object -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:45:50 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:50 DATA:  test-backend-node-gpu.js result: performance: load: null total: 139 -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js passed: centernet -2022-11-16 17:45:50 STATE: test-backend-node-gpu.js start object -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg 
[1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:45:51 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:45:51 DATA:  test-backend-node-gpu.js result: performance: load: null total: 566 -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js passed: nanodet -2022-11-16 17:45:51 INFO:  test-backend-node-gpu.js test sensitive -2022-11-16 17:45:51 STATE: test-backend-node-gpu.js start sensitive -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sensitive -2022-11-16 17:45:52 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:45:52 DATA:  test-backend-node-gpu.js result: performance: load: null total: 125 -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: sensitive result match -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: sensitive face result match -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}] -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: sensitive body result match -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js passed: sensitive hand result match -2022-11-16 17:45:52 INFO:  test-backend-node-gpu.js test body -2022-11-16 17:45:52 STATE: test-backend-node-gpu.js start blazepose -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg blazepose -2022-11-16 17:45:54 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39} -2022-11-16 17:45:54 DATA:  test-backend-node-gpu.js result: performance: load: null total: 300 -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js passed: blazepose -2022-11-16 17:45:54 STATE: test-backend-node-gpu.js start efficientpose -2022-11-16 17:45:55 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:55 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:55 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:55 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg efficientpose -2022-11-16 17:45:55 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13} -2022-11-16 17:45:55 DATA:  test-backend-node-gpu.js result: performance: load: null total: 942 
-2022-11-16 17:45:55 STATE: test-backend-node-gpu.js passed: efficientpose -2022-11-16 17:45:55 STATE: test-backend-node-gpu.js start posenet -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg posenet -2022-11-16 17:45:56 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16} -2022-11-16 17:45:56 DATA:  test-backend-node-gpu.js result: performance: load: null total: 119 -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js passed: posenet -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js start movenet -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} -2022-11-16 17:45:56 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js event: detect -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg movenet -2022-11-16 17:45:57 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:45:57 DATA:  test-backend-node-gpu.js result: performance: load: null total: 130 -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js passed: movenet -2022-11-16 17:45:57 INFO:  test-backend-node-gpu.js test face matching -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js passed: face database 40 -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js passed: face match {"first":{"index":4,"similarity":0.7829338043932047}} {"second":{"index":4,"similarity":0.5002928781584631}} {"third":{"index":4,"similarity":0.5402934771672516}} -2022-11-16 17:45:57 INFO:  test-backend-node-gpu.js test face similarity alternative -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js start face embeddings -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} -2022-11-16 17:45:57 STATE: test-backend-node-gpu.js event: image -2022-11-16 17:45:57 ERROR: test-backend-node-gpu.js failed: testDetect face embeddings -2022-11-16 17:45:57 ERROR: test-backend-node-gpu.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node-gpu.js:99:176887"," at new Promise ()"," at q5 (/home/vlado/dev/human/dist/human.node-gpu.js:99:176656)"," at k1 
(/home/vlado/dev/human/dist/human.node-gpu.js:121:6792)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-gpu.js:840:9036"]} -2022-11-16 17:45:57 INFO:  -2022-11-16 17:45:57 INFO:  test-backend-node-wasm.js start -2022-11-16 17:45:58 DATA:  test-backend-node-wasm.js stdout: 2022-11-16 17:45:58 INFO:  { supported: true, backend: true, simd: undefined, multithread: undefined } https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@4.0.0/dist/ -2022-11-16 17:45:58 STATE: test-backend-node-wasm.js passed: model server: https://vladmandic.github.io/human-models/models/ -2022-11-16 17:45:58 INFO:  test-backend-node-wasm.js test: configuration validation -2022-11-16 17:45:58 STATE: test-backend-node-wasm.js passed: configuration default validation [] -2022-11-16 17:45:58 STATE: test-backend-node-wasm.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] -2022-11-16 17:45:58 INFO:  test-backend-node-wasm.js test: model load -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: models loaded 25 11 [{"name":"ssrnetage","loaded":false,"url":null},{"name":"gear","loaded":false,"url":null},{"name":"blazeposedetect","loaded":false,"url":null},{"name":"blazepose","loaded":false,"url":null},{"name":"centernet","loaded":true,"url":"https://vladmandic.github.io/human-models/models/mb3-centernet.json"},{"name":"efficientpose","loaded":false,"url":null},{"name":"mobilefacenet","loaded":false,"url":null},{"name":"insightface","loaded":false,"url":null},{"name":"emotion","loaded":true,"url":"https://vladmandic.github.io/human-models/models/emotion.json"},{"name":"facedetect","loaded":true,"url":"https://vladmandic.github.io/human-models/models/blazeface.json"},{"name":"faceiris","loaded":true,"url":"https://vladmandic.github.io/human-models/models/iris.json"},{"name":"facemesh","loaded":true,"url":"https://vladmandic.github.io/human-models/models/facemesh.json"},{"name":"faceres","loaded":true,"url":"https://vladmandic.github.io/human-models/models/faceres.json"},{"name":"ssrnetgender","loaded":false,"url":null},{"name":"handpose","loaded":false,"url":null},{"name":"handskeleton","loaded":true,"url":"https://vladmandic.github.io/human-models/models/handlandmark-full.json"},{"name":"handtrack","loaded":true,"url":"https://vladmandic.github.io/human-models/models/handtrack.json"},{"name":"liveness","loaded":true,"url":"https://vladmandic.github.io/human-models/models/liveness.json"},{"name":"meet","loaded":false,"url":null},{"name":"movenet","loaded":true,"url":"https://vladmandic.github.io/human-models/models/movenet-lightning.json"},{"name":"nanodet","loaded":false,"url":null},{"name":"posenet","loaded":false,"url":null},{"name":"selfie","loaded":false,"url":null},{"name":"rvm","loaded":false,"url":null},{"name":"antispoof","loaded":true,"url":"https://vladmandic.github.io/human-models/models/antispoof.json"}] -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js memory: {"memory":{"unreliable":false,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js state: 
{"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js test: warmup -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: create human -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js human version: 3.0.0 -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js platform: linux x64 agent: NodeJS v19.1.0 -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js tfjs version: 4.0.0 -2022-11-16 17:46:00 INFO:  test-backend-node-wasm.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["wasm"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{},"wasm":{"supported":true,"backend":true,"simd":true,"multithread":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":126} -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: set backend: wasm -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js tensors 1785 -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: load models -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js result: defined models: 25 loaded models: 11 -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: warmup: none default -2022-11-16 17:46:00 DATA:  test-backend-node-wasm.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} -2022-11-16 17:46:00 DATA:  test-backend-node-wasm.js result: performance: load: null total: null -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: warmup none result match -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js event: warmup -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: warmup: face default -2022-11-16 17:46:00 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} -2022-11-16 17:46:00 DATA:  test-backend-node-wasm.js result: performance: load: null total: 471 -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js passed: warmup face result match -2022-11-16 17:46:00 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js event: warmup -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: warmup: body default -2022-11-16 17:46:01 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:01 DATA:  test-backend-node-wasm.js result: performance: load: null total: 345 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: warmup body result match -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js details: 
{"face":{"boxScore":0.93,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.51,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 21% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} -2022-11-16 17:46:01 INFO:  test-backend-node-wasm.js test: details verification -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js start default -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg default -2022-11-16 17:46:01 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:01 DATA:  test-backend-node-wasm.js result: performance: load: null total: 328 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face length 1 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face score 1 0.93 1 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face age/gender 23.7 female 0.97 2.34 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face arrays 4 478 1024 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face anti-spoofing 0.79 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details face liveness 0.83 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details body length 1 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details body 0.92 17 6 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details hand length 1 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details hand 0.51 0.73 point -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details hand arrays 21 5 7 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details gesture length 7 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details gesture first {"face":0,"gesture":"facing right"} -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details object length 1 -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: details object 0.72 person -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1413675264} -2022-11-16 17:46:01 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: float32 -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1413675264} -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:02 STATE: test-backend-node-wasm.js 
passed: tensor shape: [1200,1200,4] dtype: float32 -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,3] dtype: float32 -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:03 STATE: test-backend-node-wasm.js passed: tensor shape: [1200,1200,3] dtype: float32 -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: int32 -2022-11-16 17:46:04 INFO:  test-backend-node-wasm.js test default -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js start async -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:04 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg async -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: performance: load: null total: 342 -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: default result face match 1 female 0.97 -2022-11-16 17:46:05 INFO:  test-backend-node-wasm.js test sync -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js start sync -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sync -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: performance: load: null total: 327 -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: default sync 1 female 0.97 -2022-11-16 17:46:05 INFO:  test-backend-node-wasm.js test: image process -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: image input null [1,256,256,3] -2022-11-16 17:46:05 INFO:  test-backend-node-wasm.js test: image null -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: invalid input could not convert input to tensor -2022-11-16 17:46:05 INFO:  test-backend-node-wasm.js test face 
similarity -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js start face similarity -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-face.jpg face similarity -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} -2022-11-16 17:46:05 DATA:  test-backend-node-wasm.js result: performance: load: null total: 305 -2022-11-16 17:46:05 STATE: test-backend-node-wasm.js start face similarity -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg face similarity -2022-11-16 17:46:06 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:06 DATA:  test-backend-node-wasm.js result: performance: load: null total: 338 -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js start face similarity -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151155104} -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-upper.jpg face similarity -2022-11-16 17:46:06 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} -2022-11-16 17:46:06 DATA:  test-backend-node-wasm.js result: performance: load: null total: 296 -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: face descriptor -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js passed: face similarity {"similarity":[1,0.5266119940661309,0.4858842904087851],"descriptors":[1024,1024,1024]} -2022-11-16 17:46:06 INFO:  test-backend-node-wasm.js test object -2022-11-16 17:46:06 STATE: test-backend-node-wasm.js start object -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:46:07 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} -2022-11-16 17:46:07 DATA:  test-backend-node-wasm.js result: performance: load: null total: 333 -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js passed: centernet -2022-11-16 17:46:07 STATE: test-backend-node-wasm.js start object -2022-11-16 17:46:07 
WARN:  test-backend-node-wasm.js missing kernel ops {"title":"object","model":"nanodet","url":"https://vladmandic.github.io/human-models/models/nanodet.json","missing":["sparsetodense"],"backkend":"wasm"} -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object -2022-11-16 17:46:08 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:46:08 DATA:  test-backend-node-wasm.js result: performance: load: null total: 221 -2022-11-16 17:46:08 ERROR: test-backend-node-wasm.js failed: nanodet [] -2022-11-16 17:46:08 INFO:  test-backend-node-wasm.js test sensitive -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js start sensitive -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sensitive -2022-11-16 17:46:08 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:46:08 DATA:  test-backend-node-wasm.js result: performance: load: null total: 245 -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: sensitive result match -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: sensitive face result match -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: sensitive face emotion result [{"score":0.46,"emotion":"neutral"},{"score":0.24,"emotion":"fear"},{"score":0.17,"emotion":"sad"}] -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: sensitive body result match -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js passed: sensitive hand result match -2022-11-16 17:46:08 INFO:  test-backend-node-wasm.js test body -2022-11-16 17:46:08 STATE: test-backend-node-wasm.js start blazepose -2022-11-16 17:46:10 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:10 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg blazepose -2022-11-16 17:46:11 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.99,"keypoints":39} -2022-11-16 17:46:11 DATA:  test-backend-node-wasm.js result: performance: load: null total: 412 -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js passed: blazepose -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js start efficientpose -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:11 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:12 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:12 STATE: 
test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg efficientpose -2022-11-16 17:46:12 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.75,"keypoints":13} -2022-11-16 17:46:12 DATA:  test-backend-node-wasm.js result: performance: load: null total: 675 -2022-11-16 17:46:12 STATE: test-backend-node-wasm.js passed: efficientpose -2022-11-16 17:46:12 STATE: test-backend-node-wasm.js start posenet -2022-11-16 17:46:12 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:12 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg posenet -2022-11-16 17:46:13 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.96,"keypoints":16} -2022-11-16 17:46:13 DATA:  test-backend-node-wasm.js result: performance: load: null total: 298 -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: posenet -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js start movenet -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js event: detect -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg movenet -2022-11-16 17:46:13 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} -2022-11-16 17:46:13 DATA:  test-backend-node-wasm.js result: performance: load: null total: 247 -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: movenet -2022-11-16 17:46:13 INFO:  test-backend-node-wasm.js test face matching -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: face database 40 -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js passed: face match {"first":{"index":4,"similarity":0.7827852754786533}} {"second":{"index":4,"similarity":0.5660821189104794}} {"third":{"index":4,"similarity":0.45074189882665594}} -2022-11-16 17:46:13 INFO:  test-backend-node-wasm.js test face similarity alternative -2022-11-16 17:46:13 STATE: test-backend-node-wasm.js start face embeddings -2022-11-16 17:46:14 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} -2022-11-16 17:46:14 STATE: test-backend-node-wasm.js event: image -2022-11-16 17:46:14 ERROR: test-backend-node-wasm.js failed: testDetect face embeddings -2022-11-16 17:46:14 ERROR: test-backend-node-wasm.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at 
GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node-wasm.js:99:176887"," at new Promise ()"," at U5 (/home/vlado/dev/human/dist/human.node-wasm.js:99:176656)"," at w1 (/home/vlado/dev/human/dist/human.node-wasm.js:121:6792)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-wasm.js:840:9036"]} -2022-11-16 17:46:14 STATE: all tests complete -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/node.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/node-simple.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/node-event.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/node-similarity.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/node-canvas.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/nodejs/process-folder.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/multithread/node-multiprocess.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"../demo/facematch/node-match.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"test-node-load.js","passed":1,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"test-node-gear.js","passed":3,"failed":0} -2022-11-16 17:46:14 INFO:  status {"test":"test-backend-node.js","passed":85,"failed":1} -2022-11-16 17:46:14 INFO:  status {"test":"test-backend-node-gpu.js","passed":85,"failed":1} -2022-11-16 17:46:14 INFO:  status {"test":"test-backend-node-wasm.js","passed":85,"failed":2} -2022-11-16 17:46:14 INFO:  failures {"count":4} -2022-11-16 17:46:14 WARN:  failed {"test":"test-backend-node.js","message":["error",["failed:","testDetect face embeddings"]]} -2022-11-16 17:46:14 WARN:  failed {"test":"test-backend-node-gpu.js","message":["error",["failed:","testDetect face embeddings"]]} -2022-11-16 17:46:14 WARN:  failed {"test":"test-backend-node-wasm.js","message":["error",["failed: nanodet",[]]]} -2022-11-16 17:46:14 WARN:  failed {"test":"test-backend-node-wasm.js","message":["error",["failed:","testDetect face embeddings"]]} +2022-11-17 14:20:18 INFO:  @vladmandic/human version 3.0.0 +2022-11-17 14:20:18 INFO:  User: vlado Platform: linux Arch: x64 Node: v19.1.0 +2022-11-17 14:20:18 INFO:  demos: [{"cmd":"../demo/nodejs/node.js","args":[]},{"cmd":"../demo/nodejs/node-simple.js","args":[]},{"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]},{"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]},{"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]},{"cmd":"../demo/nodejs/process-folder.js","args":["samples"]},{"cmd":"../demo/multithread/node-multiprocess.js","args":[]},{"cmd":"../demo/facematch/node-match.js","args":[]}] +2022-11-17 14:20:18 INFO:  {"cmd":"../demo/nodejs/node.js","args":[]} start +2022-11-17 14:20:19 INFO:  {"cmd":"../demo/nodejs/node-simple.js","args":[]} start +2022-11-17 14:20:20 INFO:  {"cmd":"../demo/nodejs/node-event.js","args":["samples/in/ai-body.jpg"]} start +2022-11-17 14:20:21 INFO:  {"cmd":"../demo/nodejs/node-similarity.js","args":["samples/in/ai-face.jpg","samples/in/ai-upper.jpg"]} start +2022-11-17 
14:20:21 INFO:  {"cmd":"../demo/nodejs/node-canvas.js","args":["samples/in/ai-body.jpg","samples/out/ai-body.jpg"]} start +2022-11-17 14:20:22 INFO:  {"cmd":"../demo/nodejs/process-folder.js","args":["samples"]} start +2022-11-17 14:20:24 INFO:  {"cmd":"../demo/multithread/node-multiprocess.js","args":[]} start +2022-11-17 14:20:36 INFO:  {"cmd":"../demo/facematch/node-match.js","args":[]} start +2022-11-17 14:20:36 INFO:  tests: ["test-node-load.js","test-node-gear.js","test-backend-node.js","test-backend-node-gpu.js","test-backend-node-wasm.js"] +2022-11-17 14:20:36 INFO:  +2022-11-17 14:20:36 INFO:  test-node-load.js start +2022-11-17 14:20:37 INFO:  test-node-load.js load start {"human":"3.0.0","tf":"4.0.0","progress":0} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":0,"progress":0} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":10,"progress":0} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":26,"progress":0.11143791531203556} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":42,"progress":0.3961518088579138} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":64,"progress":0.5125946867158943} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":75,"progress":0.7259096583739463} +2022-11-17 14:20:37 STATE: test-node-load.js passed {"progress":1} +2022-11-17 14:20:37 INFO:  test-node-load.js load final {"progress":1} +2022-11-17 14:20:37 DATA:  test-node-load.js load interval {"elapsed":424,"progress":1} +2022-11-17 14:20:37 INFO:  +2022-11-17 14:20:37 INFO:  test-node-gear.js start +2022-11-17 14:20:37 DATA:  test-node-gear.js input: ["samples/in/ai-face.jpg"] +2022-11-17 14:20:38 STATE: test-node-gear.js passed: gear faceres samples/in/ai-face.jpg +2022-11-17 14:20:38 DATA:  test-node-gear.js results {"face":0,"model":"faceres","image":"samples/in/ai-face.jpg","age":23.5,"gender":"female","genderScore":0.92} +2022-11-17 14:20:39 STATE: test-node-gear.js passed: gear gear samples/in/ai-face.jpg +2022-11-17 14:20:39 DATA:  test-node-gear.js results {"face":0,"model":"gear","image":"samples/in/ai-face.jpg","age":23.3,"gender":"female","genderScore":0.51,"race":[{"score":0.93,"race":"white"}]} +2022-11-17 14:20:39 STATE: test-node-gear.js passed: gear ssrnet samples/in/ai-face.jpg +2022-11-17 14:20:39 DATA:  test-node-gear.js results {"face":0,"model":"ssrnet","image":"samples/in/ai-face.jpg","age":23.4,"gender":"female","genderScore":0.99} +2022-11-17 14:20:39 INFO:  +2022-11-17 14:20:39 INFO:  test-backend-node.js start +2022-11-17 14:20:39 INFO:  test-backend-node.js test: configuration validation +2022-11-17 14:20:39 STATE: test-backend-node.js passed: configuration default validation [] +2022-11-17 14:20:39 STATE: test-backend-node.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] +2022-11-17 14:20:39 INFO:  test-backend-node.js test: model load +2022-11-17 14:20:39 STATE: test-backend-node.js passed: models loaded 24 11 
[{"name":"blazeface","loaded":true,"size":538928,"url":"file://models/blazeface.json"},{"name":"antispoof","loaded":true,"size":853098,"url":"file://models/antispoof.json"},{"name":"liveness","loaded":true,"size":592976,"url":"file://models/liveness.json"},{"name":"faceres","loaded":true,"size":6978814,"url":"file://models/faceres.json"},{"name":"emotion","loaded":true,"size":820516,"url":"file://models/emotion.json"},{"name":"iris","loaded":true,"size":2599092,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"size":1477958,"url":"file://models/facemesh.json"},{"name":"gear","loaded":false,"size":0,"url":null},{"name":"ssrnetage","loaded":false,"size":0,"url":null},{"name":"ssrnetgender","loaded":false,"size":0,"url":null},{"name":"mobilefacenet","loaded":false,"size":0,"url":null},{"name":"insightface","loaded":false,"size":0,"url":null},{"name":"blazepose","loaded":false,"size":0,"url":null},{"name":"blazeposedetect","loaded":false,"size":0,"url":null},{"name":"efficientpose","loaded":false,"size":0,"url":null},{"name":"movenet","loaded":true,"size":4650216,"url":"file://models/movenet-lightning.json"},{"name":"posenet","loaded":false,"size":0,"url":null},{"name":"handtrack","loaded":true,"size":2964837,"url":"file://models/handtrack.json"},{"name":"handskeleton","loaded":true,"size":0},{"name":"centernet","loaded":true,"size":4030290,"url":"file://models/centernet.json"},{"name":"nanodet","loaded":false,"size":0,"url":null},{"name":"selfie","loaded":false,"size":0,"url":null},{"name":"meet","loaded":false,"size":0,"url":null},{"name":"rvm","loaded":false,"size":0,"url":null}] +2022-11-17 14:20:39 INFO:  test-backend-node.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} +2022-11-17 14:20:39 INFO:  test-backend-node.js state: {"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} +2022-11-17 14:20:39 INFO:  test-backend-node.js test: warmup +2022-11-17 14:20:39 STATE: test-backend-node.js passed: create human +2022-11-17 14:20:39 INFO:  test-backend-node.js human version: 3.0.0 +2022-11-17 14:20:39 INFO:  test-backend-node.js platform: linux x64 agent: NodeJS v19.1.0 +2022-11-17 14:20:39 INFO:  test-backend-node.js tfjs version: 4.0.0 +2022-11-17 14:20:39 INFO:  test-backend-node.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":false},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169} +2022-11-17 14:20:39 STATE: test-backend-node.js passed: set backend: tensorflow +2022-11-17 14:20:39 STATE: test-backend-node.js tensors 1785 +2022-11-17 14:20:39 STATE: test-backend-node.js passed: load models +2022-11-17 14:20:39 STATE: test-backend-node.js result: defined models: 2 loaded models: 2 +2022-11-17 14:20:39 STATE: test-backend-node.js passed: warmup: none default +2022-11-17 14:20:39 DATA:  test-backend-node.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} +2022-11-17 14:20:39 DATA:  test-backend-node.js 
result: performance: load: null total: null +2022-11-17 14:20:39 STATE: test-backend-node.js passed: warmup none result match +2022-11-17 14:20:39 STATE: test-backend-node.js event: image +2022-11-17 14:20:39 STATE: test-backend-node.js event: detect +2022-11-17 14:20:39 STATE: test-backend-node.js event: warmup +2022-11-17 14:20:39 STATE: test-backend-node.js passed: warmup: face default +2022-11-17 14:20:39 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4} +2022-11-17 14:20:39 DATA:  test-backend-node.js result: performance: load: null total: 422 +2022-11-17 14:20:39 STATE: test-backend-node.js passed: warmup face result match +2022-11-17 14:20:39 STATE: test-backend-node.js event: image +2022-11-17 14:20:40 STATE: test-backend-node.js event: detect +2022-11-17 14:20:40 STATE: test-backend-node.js event: warmup +2022-11-17 14:20:40 STATE: test-backend-node.js passed: warmup: body default +2022-11-17 14:20:40 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:40 DATA:  test-backend-node.js result: performance: load: null total: 352 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: warmup body result match +2022-11-17 14:20:40 STATE: test-backend-node.js details: {"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} +2022-11-17 14:20:40 INFO:  test-backend-node.js test: details verification +2022-11-17 14:20:40 STATE: test-backend-node.js start default +2022-11-17 14:20:40 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:40 STATE: test-backend-node.js event: image +2022-11-17 14:20:40 STATE: test-backend-node.js event: detect +2022-11-17 14:20:40 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg default +2022-11-17 14:20:40 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:40 DATA:  test-backend-node.js result: performance: load: null total: 319 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face length 1 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face score 1 0.93 1 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face age/gender 23.7 female 0.97 2.34 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face arrays 4 478 1024 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face anti-spoofing 0.79 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details face liveness 0.83 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details body length 1 +2022-11-17 14:20:40 
STATE: test-backend-node.js passed: details body 0.92 17 6 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details hand length 1 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details hand 0.51 0.73 point +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details hand arrays 21 5 7 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details gesture length 7 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details gesture first {"face":0,"gesture":"facing right"} +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details object length 1 +2022-11-17 14:20:40 STATE: test-backend-node.js passed: details object 0.72 person +2022-11-17 14:20:40 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928} +2022-11-17 14:20:40 STATE: test-backend-node.js event: image +2022-11-17 14:20:41 STATE: test-backend-node.js event: detect +2022-11-17 14:20:41 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: float32 +2022-11-17 14:20:41 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928} +2022-11-17 14:20:41 STATE: test-backend-node.js event: image +2022-11-17 14:20:41 STATE: test-backend-node.js event: detect +2022-11-17 14:20:41 STATE: test-backend-node.js passed: tensor shape: [1200,1200,4] dtype: float32 +2022-11-17 14:20:41 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:41 STATE: test-backend-node.js event: image +2022-11-17 14:20:42 STATE: test-backend-node.js event: detect +2022-11-17 14:20:42 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,3] dtype: float32 +2022-11-17 14:20:42 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:42 STATE: test-backend-node.js event: image +2022-11-17 14:20:42 STATE: test-backend-node.js event: detect +2022-11-17 14:20:42 STATE: test-backend-node.js passed: tensor shape: [1200,1200,3] dtype: float32 +2022-11-17 14:20:42 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} +2022-11-17 14:20:42 STATE: test-backend-node.js event: image +2022-11-17 14:20:43 STATE: test-backend-node.js event: detect +2022-11-17 14:20:43 STATE: test-backend-node.js passed: tensor shape: [1,1200,1200,4] dtype: int32 +2022-11-17 14:20:43 INFO:  test-backend-node.js test default +2022-11-17 14:20:43 STATE: test-backend-node.js start async +2022-11-17 14:20:43 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:43 STATE: test-backend-node.js event: image +2022-11-17 14:20:43 STATE: test-backend-node.js event: detect +2022-11-17 14:20:43 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg async +2022-11-17 14:20:43 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:43 DATA:  test-backend-node.js result: performance: load: null total: 293 +2022-11-17 14:20:43 STATE: test-backend-node.js passed: default result face match 1 female 0.97 +2022-11-17 14:20:43 INFO:  test-backend-node.js test sync +2022-11-17 14:20:43 STATE: test-backend-node.js start sync +2022-11-17 14:20:43 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg 
[1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:43 STATE: test-backend-node.js event: image +2022-11-17 14:20:44 STATE: test-backend-node.js event: detect +2022-11-17 14:20:44 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg sync +2022-11-17 14:20:44 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:44 DATA:  test-backend-node.js result: performance: load: null total: 312 +2022-11-17 14:20:44 STATE: test-backend-node.js passed: default sync 1 female 0.97 +2022-11-17 14:20:44 INFO:  test-backend-node.js test: image process +2022-11-17 14:20:44 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:20:44 STATE: test-backend-node.js passed: image input null [1,256,256,3] +2022-11-17 14:20:44 INFO:  test-backend-node.js test: image null +2022-11-17 14:20:44 STATE: test-backend-node.js passed: invalid input could not convert input to tensor +2022-11-17 14:20:44 INFO:  test-backend-node.js test face similarity +2022-11-17 14:20:44 STATE: test-backend-node.js start face similarity +2022-11-17 14:20:44 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:20:44 STATE: test-backend-node.js event: image +2022-11-17 14:20:44 STATE: test-backend-node.js event: detect +2022-11-17 14:20:44 STATE: test-backend-node.js passed: detect: samples/in/ai-face.jpg face similarity +2022-11-17 14:20:44 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} +2022-11-17 14:20:44 DATA:  test-backend-node.js result: performance: load: null total: 294 +2022-11-17 14:20:44 STATE: test-backend-node.js start face similarity +2022-11-17 14:20:44 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:44 STATE: test-backend-node.js event: image +2022-11-17 14:20:45 STATE: test-backend-node.js event: detect +2022-11-17 14:20:45 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg face similarity +2022-11-17 14:20:45 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:45 DATA:  test-backend-node.js result: performance: load: null total: 290 +2022-11-17 14:20:45 STATE: test-backend-node.js start face similarity +2022-11-17 14:20:45 STATE: test-backend-node.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289024} +2022-11-17 14:20:45 STATE: test-backend-node.js event: image +2022-11-17 14:20:45 STATE: test-backend-node.js event: detect +2022-11-17 14:20:45 STATE: test-backend-node.js passed: detect: samples/in/ai-upper.jpg face similarity +2022-11-17 14:20:45 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} +2022-11-17 14:20:45 DATA:  test-backend-node.js result: performance: load: null total: 269 +2022-11-17 14:20:45 STATE: test-backend-node.js passed: face descriptor +2022-11-17 14:20:45 STATE: test-backend-node.js passed: face similarity 
{"similarity":[1,0.44727441595492046,0.556793560189727],"descriptors":[1024,1024,1024]} +2022-11-17 14:20:45 INFO:  test-backend-node.js test object +2022-11-17 14:20:45 STATE: test-backend-node.js start object +2022-11-17 14:20:45 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:45 STATE: test-backend-node.js event: image +2022-11-17 14:20:45 STATE: test-backend-node.js event: detect +2022-11-17 14:20:45 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:20:45 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:45 DATA:  test-backend-node.js result: performance: load: null total: 298 +2022-11-17 14:20:45 STATE: test-backend-node.js passed: centernet +2022-11-17 14:20:45 STATE: test-backend-node.js start object +2022-11-17 14:20:47 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:47 STATE: test-backend-node.js event: image +2022-11-17 14:20:47 STATE: test-backend-node.js event: detect +2022-11-17 14:20:47 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:20:47 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:47 DATA:  test-backend-node.js result: performance: load: null total: 297 +2022-11-17 14:20:47 STATE: test-backend-node.js passed: nanodet +2022-11-17 14:20:47 INFO:  test-backend-node.js test sensitive +2022-11-17 14:20:47 STATE: test-backend-node.js start sensitive +2022-11-17 14:20:47 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:47 STATE: test-backend-node.js event: image +2022-11-17 14:20:47 STATE: test-backend-node.js event: detect +2022-11-17 14:20:47 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg sensitive +2022-11-17 14:20:47 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:20:47 DATA:  test-backend-node.js result: performance: load: null total: 253 +2022-11-17 14:20:47 STATE: test-backend-node.js passed: sensitive result match +2022-11-17 14:20:47 STATE: test-backend-node.js passed: sensitive face result match +2022-11-17 14:20:47 STATE: test-backend-node.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}] +2022-11-17 14:20:47 STATE: test-backend-node.js passed: sensitive body result match +2022-11-17 14:20:47 STATE: test-backend-node.js passed: sensitive hand result match +2022-11-17 14:20:47 INFO:  test-backend-node.js test body +2022-11-17 14:20:47 STATE: test-backend-node.js start blazepose +2022-11-17 14:20:50 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:50 STATE: test-backend-node.js event: image +2022-11-17 14:20:50 STATE: test-backend-node.js event: detect +2022-11-17 14:20:50 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg blazepose +2022-11-17 14:20:50 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 
person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39} +2022-11-17 14:20:50 DATA:  test-backend-node.js result: performance: load: null total: 336 +2022-11-17 14:20:50 STATE: test-backend-node.js passed: blazepose +2022-11-17 14:20:50 STATE: test-backend-node.js start efficientpose +2022-11-17 14:20:51 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:51 STATE: test-backend-node.js event: image +2022-11-17 14:20:51 STATE: test-backend-node.js event: detect +2022-11-17 14:20:51 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg efficientpose +2022-11-17 14:20:51 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13} +2022-11-17 14:20:51 DATA:  test-backend-node.js result: performance: load: null total: 306 +2022-11-17 14:20:51 STATE: test-backend-node.js passed: efficientpose +2022-11-17 14:20:51 STATE: test-backend-node.js start posenet +2022-11-17 14:20:52 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:52 STATE: test-backend-node.js event: image +2022-11-17 14:20:52 STATE: test-backend-node.js event: detect +2022-11-17 14:20:52 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg posenet +2022-11-17 14:20:52 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16} +2022-11-17 14:20:52 DATA:  test-backend-node.js result: performance: load: null total: 255 +2022-11-17 14:20:52 STATE: test-backend-node.js passed: posenet +2022-11-17 14:20:52 STATE: test-backend-node.js start movenet +2022-11-17 14:20:52 STATE: test-backend-node.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796864} +2022-11-17 14:20:52 STATE: test-backend-node.js event: image +2022-11-17 14:20:52 STATE: test-backend-node.js event: detect +2022-11-17 14:20:52 STATE: test-backend-node.js passed: detect: samples/in/ai-body.jpg movenet +2022-11-17 14:20:52 DATA:  test-backend-node.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:20:52 DATA:  test-backend-node.js result: performance: load: null total: 249 +2022-11-17 14:20:52 STATE: test-backend-node.js passed: movenet +2022-11-17 14:20:52 INFO:  test-backend-node.js test face matching +2022-11-17 14:20:52 STATE: test-backend-node.js passed: face database 40 +2022-11-17 14:20:52 STATE: test-backend-node.js passed: face match {"first":{"index":4,"similarity":0.7827852251220577}} {"second":{"index":4,"similarity":0.5002052057057577}} {"third":{"index":4,"similarity":0.5401588464054732}} +2022-11-17 14:20:52 INFO:  test-backend-node.js test face similarity alternative +2022-11-17 14:20:52 STATE: test-backend-node.js start face embeddings +2022-11-17 14:20:53 STATE: test-backend-node.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:20:53 STATE: test-backend-node.js event: image +2022-11-17 14:20:53 ERROR: test-backend-node.js failed: testDetect face embeddings +2022-11-17 14:20:53 ERROR: test-backend-node.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined 
(reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node.js:121:37595"," at new Promise ()"," at V5 (/home/vlado/dev/human/dist/human.node.js:121:37364)"," at q5 (/home/vlado/dev/human/dist/human.node.js:121:43414)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node.js:840:8563"]} +2022-11-17 14:20:53 INFO:  +2022-11-17 14:20:53 INFO:  test-backend-node-gpu.js start +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js test: configuration validation +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: configuration default validation [] +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js test: model load +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: models loaded 24 11 [{"name":"blazeface","loaded":true,"size":538928,"url":"file://models/blazeface.json"},{"name":"antispoof","loaded":true,"size":853098,"url":"file://models/antispoof.json"},{"name":"liveness","loaded":true,"size":592976,"url":"file://models/liveness.json"},{"name":"faceres","loaded":true,"size":6978814,"url":"file://models/faceres.json"},{"name":"emotion","loaded":true,"size":820516,"url":"file://models/emotion.json"},{"name":"iris","loaded":true,"size":2599092,"url":"file://models/iris.json"},{"name":"facemesh","loaded":true,"size":1477958,"url":"file://models/facemesh.json"},{"name":"gear","loaded":false,"size":0,"url":null},{"name":"ssrnetage","loaded":false,"size":0,"url":null},{"name":"ssrnetgender","loaded":false,"size":0,"url":null},{"name":"mobilefacenet","loaded":false,"size":0,"url":null},{"name":"insightface","loaded":false,"size":0,"url":null},{"name":"blazepose","loaded":false,"size":0,"url":null},{"name":"blazeposedetect","loaded":false,"size":0,"url":null},{"name":"efficientpose","loaded":false,"size":0,"url":null},{"name":"movenet","loaded":true,"size":4650216,"url":"file://models/movenet-lightning.json"},{"name":"posenet","loaded":false,"size":0,"url":null},{"name":"handtrack","loaded":true,"size":2964837,"url":"file://models/handtrack.json"},{"name":"handskeleton","loaded":true,"size":0},{"name":"centernet","loaded":true,"size":4030290,"url":"file://models/centernet.json"},{"name":"nanodet","loaded":false,"size":0,"url":null},{"name":"selfie","loaded":false,"size":0,"url":null},{"name":"meet","loaded":false,"size":0,"url":null},{"name":"rvm","loaded":false,"size":0,"url":null}] +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js memory: {"memory":{"unreliable":true,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js state: 
{"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js test: warmup +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: create human +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js human version: 3.0.0 +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js platform: linux x64 agent: NodeJS v19.1.0 +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js tfjs version: 4.0.0 +2022-11-17 14:20:54 INFO:  test-backend-node-gpu.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["cpu","tensorflow"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{"version":"2.9.1","gpu":true},"wasm":{"supported":true,"backend":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":169} +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: set backend: tensorflow +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js tensors 1785 +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: load models +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js result: defined models: 2 loaded models: 2 +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: warmup: none default +2022-11-17 14:20:54 DATA:  test-backend-node-gpu.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} +2022-11-17 14:20:54 DATA:  test-backend-node-gpu.js result: performance: load: null total: null +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js passed: warmup none result match +2022-11-17 14:20:54 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: warmup +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js passed: warmup: face default +2022-11-17 14:20:57 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.42,"keypoints":4} +2022-11-17 14:20:57 DATA:  test-backend-node-gpu.js result: performance: load: null total: 2683 +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js passed: warmup face result match +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: warmup +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js passed: warmup: body default +2022-11-17 14:20:57 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:57 DATA:  test-backend-node-gpu.js result: performance: load: null total: 144 +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js passed: warmup body result match +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js details: 
{"face":{"boxScore":0.92,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.63,"emotion":"angry"},{"score":0.22,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.52,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 10% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} +2022-11-17 14:20:57 INFO:  test-backend-node-gpu.js test: details verification +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js start default +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:20:57 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg default +2022-11-17 14:20:58 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:20:58 DATA:  test-backend-node-gpu.js result: performance: load: null total: 144 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face length 1 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face score 1 0.93 1 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face age/gender 23.7 female 0.97 2.34 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face arrays 4 478 1024 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face anti-spoofing 0.79 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details face liveness 0.83 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details body length 1 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details body 0.92 17 6 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details hand length 1 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details hand 0.51 0.73 point +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details hand arrays 21 5 7 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details gesture length 7 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details gesture first {"face":0,"gesture":"facing right"} +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details object length 1 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: details object 0.72 person +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996928} +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: float32 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1371996928} +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: tensor shape: 
[1200,1200,4] dtype: float32 +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:20:58 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,3] dtype: float32 +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1004796928} +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js passed: tensor shape: [1200,1200,3] dtype: float32 +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js passed: tensor shape: [1,1200,1200,4] dtype: int32 +2022-11-17 14:20:59 INFO:  test-backend-node-gpu.js test default +2022-11-17 14:20:59 STATE: test-backend-node-gpu.js start async +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg async +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: performance: load: null total: 113 +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: default result face match 1 female 0.97 +2022-11-17 14:21:00 INFO:  test-backend-node-gpu.js test sync +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js start sync +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sync +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: performance: load: null total: 116 +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: default sync 1 female 0.97 +2022-11-17 14:21:00 INFO:  test-backend-node-gpu.js test: image process +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: image input null [1,256,256,3] +2022-11-17 14:21:00 INFO:  test-backend-node-gpu.js test: image null +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: invalid input could not convert input to tensor +2022-11-17 14:21:00 INFO:  test-backend-node-gpu.js test face similarity +2022-11-17 14:21:00 STATE: 
test-backend-node-gpu.js start face similarity +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-face.jpg face similarity +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: performance: load: null total: 122 +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js start face similarity +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg face similarity +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:00 DATA:  test-backend-node-gpu.js result: performance: load: null total: 135 +2022-11-17 14:21:00 STATE: test-backend-node-gpu.js start face similarity +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151289056} +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-upper.jpg face similarity +2022-11-17 14:21:01 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} +2022-11-17 14:21:01 DATA:  test-backend-node-gpu.js result: performance: load: null total: 119 +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: face descriptor +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: face similarity {"similarity":[1,0.4475002983522097,0.5570879556505012],"descriptors":[1024,1024,1024]} +2022-11-17 14:21:01 INFO:  test-backend-node-gpu.js test object +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js start object +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:21:01 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:01 DATA:  test-backend-node-gpu.js result: performance: load: null total: 111 +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js passed: centernet +2022-11-17 14:21:01 STATE: test-backend-node-gpu.js start object +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg 
[1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:21:02 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 3 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.86,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:02 DATA:  test-backend-node-gpu.js result: performance: load: null total: 388 +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js passed: nanodet +2022-11-17 14:21:02 INFO:  test-backend-node-gpu.js test sensitive +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js start sensitive +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:02 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg sensitive +2022-11-17 14:21:03 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:21:03 DATA:  test-backend-node-gpu.js result: performance: load: null total: 112 +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: sensitive result match +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: sensitive face result match +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: sensitive face emotion result [{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}] +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: sensitive body result match +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js passed: sensitive hand result match +2022-11-17 14:21:03 INFO:  test-backend-node-gpu.js test body +2022-11-17 14:21:03 STATE: test-backend-node-gpu.js start blazepose +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg blazepose +2022-11-17 14:21:05 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.99,"keypoints":39} +2022-11-17 14:21:05 DATA:  test-backend-node-gpu.js result: performance: load: null total: 269 +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js passed: blazepose +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js start efficientpose +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:05 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:06 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:06 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg efficientpose +2022-11-17 14:21:06 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.75,"keypoints":13} +2022-11-17 14:21:06 DATA:  test-backend-node-gpu.js result: performance: load: null total: 972 
+2022-11-17 14:21:06 STATE: test-backend-node-gpu.js passed: efficientpose +2022-11-17 14:21:06 STATE: test-backend-node-gpu.js start posenet +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg posenet +2022-11-17 14:21:07 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.96,"keypoints":16} +2022-11-17 14:21:07 DATA:  test-backend-node-gpu.js result: performance: load: null total: 125 +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: posenet +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js start movenet +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1004796928} +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js event: detect +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: detect: samples/in/ai-body.jpg movenet +2022-11-17 14:21:07 DATA:  test-backend-node-gpu.js result: face: 1 body: 1 hand: 2 gesture: 9 object: 0 person: 1 {"score":1,"age":23.7,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:21:07 DATA:  test-backend-node-gpu.js result: performance: load: null total: 99 +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: movenet +2022-11-17 14:21:07 INFO:  test-backend-node-gpu.js test face matching +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: face database 40 +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js passed: face match {"first":{"index":4,"similarity":0.7829338043932047}} {"second":{"index":4,"similarity":0.5002928781584631}} {"third":{"index":4,"similarity":0.5402934771672516}} +2022-11-17 14:21:07 INFO:  test-backend-node-gpu.js test face similarity alternative +2022-11-17 14:21:07 STATE: test-backend-node-gpu.js start face embeddings +2022-11-17 14:21:08 STATE: test-backend-node-gpu.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34696120} +2022-11-17 14:21:08 STATE: test-backend-node-gpu.js event: image +2022-11-17 14:21:08 ERROR: test-backend-node-gpu.js failed: testDetect face embeddings +2022-11-17 14:21:08 ERROR: test-backend-node-gpu.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at /home/vlado/dev/human/dist/human.node-gpu.js:121:37595"," at new Promise ()"," at V5 (/home/vlado/dev/human/dist/human.node-gpu.js:121:37364)"," at q5 
(/home/vlado/dev/human/dist/human.node-gpu.js:121:43414)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-gpu.js:840:8563"]} +2022-11-17 14:21:08 INFO:  +2022-11-17 14:21:08 INFO:  test-backend-node-wasm.js start +2022-11-17 14:21:08 DATA:  test-backend-node-wasm.js stdout: 2022-11-17 14:21:08 INFO:  { supported: true, backend: true, simd: undefined, multithread: undefined } https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm@4.0.0/dist/ +2022-11-17 14:21:09 STATE: test-backend-node-wasm.js passed: model server: https://vladmandic.github.io/human-models/models/ +2022-11-17 14:21:09 INFO:  test-backend-node-wasm.js test: configuration validation +2022-11-17 14:21:09 STATE: test-backend-node-wasm.js passed: configuration default validation [] +2022-11-17 14:21:09 STATE: test-backend-node-wasm.js passed: configuration invalid validation [{"reason":"unknown property","where":"config.invalid = true"}] +2022-11-17 14:21:09 INFO:  test-backend-node-wasm.js test: model load +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: models loaded 24 11 [{"name":"blazeface","loaded":true,"size":538928,"url":"https://vladmandic.github.io/human-models/models/blazeface.json"},{"name":"antispoof","loaded":true,"size":853098,"url":"https://vladmandic.github.io/human-models/models/antispoof.json"},{"name":"liveness","loaded":true,"size":592976,"url":"https://vladmandic.github.io/human-models/models/liveness.json"},{"name":"faceres","loaded":true,"size":6978814,"url":"https://vladmandic.github.io/human-models/models/faceres.json"},{"name":"emotion","loaded":true,"size":820516,"url":"https://vladmandic.github.io/human-models/models/emotion.json"},{"name":"iris","loaded":true,"size":2599092,"url":"https://vladmandic.github.io/human-models/models/iris.json"},{"name":"facemesh","loaded":true,"size":1477958,"url":"https://vladmandic.github.io/human-models/models/facemesh.json"},{"name":"gear","loaded":false,"size":0,"url":null},{"name":"ssrnetage","loaded":false,"size":0,"url":null},{"name":"ssrnetgender","loaded":false,"size":0,"url":null},{"name":"mobilefacenet","loaded":false,"size":0,"url":null},{"name":"insightface","loaded":false,"size":0,"url":null},{"name":"blazepose","loaded":false,"size":0,"url":null},{"name":"blazeposedetect","loaded":false,"size":0,"url":null},{"name":"efficientpose","loaded":false,"size":0,"url":null},{"name":"movenet","loaded":true,"size":4650216,"url":"https://vladmandic.github.io/human-models/models/movenet-lightning.json"},{"name":"posenet","loaded":false,"size":0,"url":null},{"name":"handtrack","loaded":true,"size":2964837,"url":"https://vladmandic.github.io/human-models/models/handtrack.json"},{"name":"handskeleton","loaded":true,"size":0},{"name":"centernet","loaded":true,"size":4030290,"url":"https://vladmandic.github.io/human-models/models/centernet.json"},{"name":"nanodet","loaded":false,"size":0,"url":null},{"name":"selfie","loaded":false,"size":0,"url":null},{"name":"meet","loaded":false,"size":0,"url":null},{"name":"rvm","loaded":false,"size":0,"url":null}] +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js memory: {"memory":{"unreliable":false,"numTensors":1785,"numDataBuffers":1785,"numBytes":63247332}} +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js state: 
{"state":{"registeredVariables":{},"nextTapeNodeId":0,"numBytes":63247332,"numTensors":1785,"numStringTensors":0,"numDataBuffers":1785,"gradientDepth":0,"kernelDepth":0,"scopeStack":[],"numDataMovesStack":[],"nextScopeId":0,"tensorInfo":{},"profiling":false,"activeProfile":{"newBytes":0,"newTensors":0,"peakBytes":0,"kernels":[],"result":null,"kernelNames":[]}}} +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js test: warmup +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: create human +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js human version: 3.0.0 +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js platform: linux x64 agent: NodeJS v19.1.0 +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js tfjs version: 4.0.0 +2022-11-17 14:21:11 INFO:  test-backend-node-wasm.js env: {"browser":false,"node":true,"platform":"linux x64","agent":"NodeJS v19.1.0","backends":["wasm"],"initial":false,"tfjs":{"version":"4.0.0"},"offscreen":false,"perfadd":false,"tensorflow":{},"wasm":{"supported":true,"backend":true,"simd":true,"multithread":false},"webgl":{"supported":false,"backend":false},"webgpu":{"supported":false,"backend":false},"cpu":{"flags":[]},"kernels":126} +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: set backend: wasm +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js tensors 1785 +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: load models +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js result: defined models: 2 loaded models: 2 +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: warmup: none default +2022-11-17 14:21:11 DATA:  test-backend-node-wasm.js result: face: 0 body: 0 hand: 0 gesture: 0 object: 0 person: 0 {} {} {} +2022-11-17 14:21:11 DATA:  test-backend-node-wasm.js result: performance: load: null total: null +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: warmup none result match +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js event: warmup +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: warmup: face default +2022-11-17 14:21:11 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} +2022-11-17 14:21:11 DATA:  test-backend-node-wasm.js result: performance: load: null total: 513 +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js passed: warmup face result match +2022-11-17 14:21:11 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js event: warmup +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: warmup: body default +2022-11-17 14:21:12 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:12 DATA:  test-backend-node-wasm.js result: performance: load: null total: 342 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: warmup body result match +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js details: 
{"face":{"boxScore":0.93,"faceScore":1,"age":23.7,"gender":"female","genderScore":0.97},"emotion":[{"score":0.59,"emotion":"angry"},{"score":0.29,"emotion":"fear"}],"body":{"score":0.92,"keypoints":17},"hand":{"boxScore":0.51,"fingerScore":0.73,"keypoints":21},"gestures":[{"face":0,"gesture":"facing right"},{"face":0,"gesture":"mouth 21% open"},{"hand":0,"gesture":"pinky forward"},{"hand":0,"gesture":"palm up"},{"hand":0,"gesture":"open palm"},{"iris":0,"gesture":"looking left"},{"iris":0,"gesture":"looking up"}]} +2022-11-17 14:21:12 INFO:  test-backend-node-wasm.js test: details verification +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js start default +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg default +2022-11-17 14:21:12 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 7 object: 1 person: 1 {"score":1,"age":23.7,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:12 DATA:  test-backend-node-wasm.js result: performance: load: null total: 325 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face length 1 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face score 1 0.93 1 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face age/gender 23.7 female 0.97 2.34 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face arrays 4 478 1024 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face emotion 2 {"score":0.59,"emotion":"angry"} +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face anti-spoofing 0.79 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details face liveness 0.83 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details body length 1 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details body 0.92 17 6 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details hand length 1 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details hand 0.51 0.73 point +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details hand arrays 21 5 7 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details gesture length 7 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details gesture first {"face":0,"gesture":"facing right"} +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details object length 1 +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: details object 0.72 person +2022-11-17 14:21:12 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1413675264} +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: float32 +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,4] {"checksum":1413675264} +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:13 STATE: test-backend-node-wasm.js 
passed: tensor shape: [1200,1200,4] dtype: float32 +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,3] dtype: float32 +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:14 STATE: test-backend-node-wasm.js passed: tensor shape: [1200,1200,3] dtype: float32 +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,4] {"checksum":1371996871} +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js passed: tensor shape: [1,1200,1200,4] dtype: int32 +2022-11-17 14:21:15 INFO:  test-backend-node-wasm.js test default +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js start async +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:15 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg async +2022-11-17 14:21:16 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:16 DATA:  test-backend-node-wasm.js result: performance: load: null total: 338 +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: default result face match 1 female 0.97 +2022-11-17 14:21:16 INFO:  test-backend-node-wasm.js test sync +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js start sync +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sync +2022-11-17 14:21:16 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:16 DATA:  test-backend-node-wasm.js result: performance: load: null total: 334 +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: default sync 1 female 0.97 +2022-11-17 14:21:16 INFO:  test-backend-node-wasm.js test: image process +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: image input null [1,256,256,3] +2022-11-17 14:21:16 INFO:  test-backend-node-wasm.js test: image null +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: invalid input could not convert input to tensor +2022-11-17 14:21:16 INFO:  test-backend-node-wasm.js test face 
similarity +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js start face similarity +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} +2022-11-17 14:21:16 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-face.jpg face similarity +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 6 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.82,"class":"person"} {"score":0.47,"keypoints":3} +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: performance: load: null total: 301 +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js start face similarity +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg face similarity +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: performance: load: null total: 326 +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js start face similarity +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-upper.jpg [1,720,688,3] {"checksum":151155104} +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-upper.jpg face similarity +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 0 gesture: 4 object: 1 person: 1 {"score":1,"age":23.5,"gender":"female"} {"score":0.71,"class":"person"} {"score":0.75,"keypoints":7} +2022-11-17 14:21:17 DATA:  test-backend-node-wasm.js result: performance: load: null total: 294 +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: face descriptor +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js passed: face similarity {"similarity":[1,0.5266119940661309,0.4858842904087851],"descriptors":[1024,1024,1024]} +2022-11-17 14:21:17 INFO:  test-backend-node-wasm.js test object +2022-11-17 14:21:17 STATE: test-backend-node-wasm.js start object +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:21:18 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 1 person: 1 {"score":1,"age":29.6,"gender":"female"} {"score":0.72,"class":"person"} {"score":0.92,"keypoints":17} +2022-11-17 14:21:18 DATA:  test-backend-node-wasm.js result: performance: load: null total: 370 +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js passed: centernet +2022-11-17 14:21:18 STATE: test-backend-node-wasm.js start object +2022-11-17 14:21:19 
STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:19 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:19 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:19 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg object +2022-11-17 14:21:19 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 1 gesture: 8 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:21:19 DATA:  test-backend-node-wasm.js result: performance: load: null total: 231 +2022-11-17 14:21:19 ERROR: test-backend-node-wasm.js failed: nanodet [] +2022-11-17 14:21:19 INFO:  test-backend-node-wasm.js test sensitive +2022-11-17 14:21:19 STATE: test-backend-node-wasm.js start sensitive +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg sensitive +2022-11-17 14:21:20 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:21:20 DATA:  test-backend-node-wasm.js result: performance: load: null total: 272 +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: sensitive result match +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: sensitive face result match +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: sensitive face emotion result [{"score":0.46,"emotion":"neutral"},{"score":0.24,"emotion":"fear"},{"score":0.17,"emotion":"sad"}] +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: sensitive body result match +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js passed: sensitive hand result match +2022-11-17 14:21:20 INFO:  test-backend-node-wasm.js test body +2022-11-17 14:21:20 STATE: test-backend-node-wasm.js start blazepose +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg blazepose +2022-11-17 14:21:23 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.99,"keypoints":39} +2022-11-17 14:21:23 DATA:  test-backend-node-wasm.js result: performance: load: null total: 394 +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js passed: blazepose +2022-11-17 14:21:23 STATE: test-backend-node-wasm.js start efficientpose +2022-11-17 14:21:24 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:24 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:25 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:25 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg efficientpose +2022-11-17 14:21:25 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} 
{"score":0.75,"keypoints":13} +2022-11-17 14:21:25 DATA:  test-backend-node-wasm.js result: performance: load: null total: 667 +2022-11-17 14:21:25 STATE: test-backend-node-wasm.js passed: efficientpose +2022-11-17 14:21:25 STATE: test-backend-node-wasm.js start posenet +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg posenet +2022-11-17 14:21:26 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.96,"keypoints":16} +2022-11-17 14:21:26 DATA:  test-backend-node-wasm.js result: performance: load: null total: 297 +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: posenet +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js start movenet +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-body.jpg [1,1200,1200,3] {"checksum":1038921856} +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js event: detect +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: detect: samples/in/ai-body.jpg movenet +2022-11-17 14:21:26 DATA:  test-backend-node-wasm.js result: face: 1 body: 1 hand: 2 gesture: 10 object: 0 person: 1 {"score":1,"age":29.6,"gender":"female"} {} {"score":0.92,"keypoints":17} +2022-11-17 14:21:26 DATA:  test-backend-node-wasm.js result: performance: load: null total: 232 +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: movenet +2022-11-17 14:21:26 INFO:  test-backend-node-wasm.js test face matching +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: face database 40 +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js passed: face match {"first":{"index":4,"similarity":0.7827852754786533}} {"second":{"index":4,"similarity":0.5660821189104794}} {"third":{"index":4,"similarity":0.45074189882665594}} +2022-11-17 14:21:26 INFO:  test-backend-node-wasm.js test face similarity alternative +2022-11-17 14:21:26 STATE: test-backend-node-wasm.js start face embeddings +2022-11-17 14:21:27 STATE: test-backend-node-wasm.js passed: load image: samples/in/ai-face.jpg [1,256,256,3] {"checksum":34697856} +2022-11-17 14:21:27 STATE: test-backend-node-wasm.js event: image +2022-11-17 14:21:27 ERROR: test-backend-node-wasm.js failed: testDetect face embeddings +2022-11-17 14:21:27 ERROR: test-backend-node-wasm.js uncaughtException {"name":"TypeError","message":"Cannot read properties of undefined (reading 'img_inputs')","stack":["TypeError: Cannot read properties of undefined (reading 'img_inputs')"," at /home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30706:69"," at Array.reduce ()"," at GraphModel.normalizeInputs (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30705:32)"," at GraphModel.execute (/home/vlado/dev/human/node_modules/.pnpm/@tensorflow+tfjs-converter@4.0.0_hdmpc5coifabqk2ogondqkcwg4/node_modules/@tensorflow/tfjs-converter/dist/tf-converter.node.js:30777:23)"," at 
/home/vlado/dev/human/dist/human.node-wasm.js:121:37595"," at new Promise ()"," at Z5 (/home/vlado/dev/human/dist/human.node-wasm.js:121:37364)"," at U5 (/home/vlado/dev/human/dist/human.node-wasm.js:121:43414)"," at process.processTicksAndRejections (node:internal/process/task_queues:95:5)"," at async /home/vlado/dev/human/dist/human.node-wasm.js:840:8563"]}
+2022-11-17 14:21:27 STATE: all tests complete
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/node.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/node-simple.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/node-event.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/node-similarity.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/node-canvas.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/nodejs/process-folder.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/multithread/node-multiprocess.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"../demo/facematch/node-match.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"test-node-load.js","passed":1,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"test-node-gear.js","passed":3,"failed":0}
+2022-11-17 14:21:27 INFO:  status {"test":"test-backend-node.js","passed":85,"failed":1}
+2022-11-17 14:21:27 INFO:  status {"test":"test-backend-node-gpu.js","passed":85,"failed":1}
+2022-11-17 14:21:27 INFO:  status {"test":"test-backend-node-wasm.js","passed":85,"failed":2}
+2022-11-17 14:21:27 INFO:  failures {"count":4}
+2022-11-17 14:21:27 WARN:  failed {"test":"test-backend-node.js","message":["error",["failed:","testDetect face embeddings"]]}
+2022-11-17 14:21:27 WARN:  failed {"test":"test-backend-node-gpu.js","message":["error",["failed:","testDetect face embeddings"]]}
+2022-11-17 14:21:27 WARN:  failed {"test":"test-backend-node-wasm.js","message":["error",["failed: nanodet",[]]]}
+2022-11-17 14:21:27 WARN:  failed {"test":"test-backend-node-wasm.js","message":["error",["failed:","testDetect face embeddings"]]}
diff --git a/wiki b/wiki
index 93e58e16..6ea5ea91 160000
--- a/wiki
+++ b/wiki
@@ -1 +1 @@
-Subproject commit 93e58e16b579922e2f19bc91c8ead0af0f326f5a
+Subproject commit 6ea5ea911dcf7ad598c8ee3777b103d7e531fec5