add attention draw methods

pull/356/head
Vladimir Mandic 2022-04-18 12:26:05 -04:00
parent 4ab5c778bd
commit 4bd1f53a0b
10 changed files with 766 additions and 3499 deletions


@@ -9,8 +9,12 @@
 ## Changelog
-### **HEAD -> main** 2022/04/14 mandic00@live.com
+### **HEAD -> main** 2022/04/18 mandic00@live.com
+### **origin/main** 2022/04/15 mandic00@live.com
+- prepare release beta
 - change default face crop
 - face attention model is available in human-models
 - beta release 2.7


@@ -11,9 +11,14 @@ Release 2.7:
   result is a speed-up to first inference of around 30% for browser environments
 - Changed default face crop from 120% to 140%
   to better utilize caching between frames
-- Refactor **draw** methods into separate modules
-- Add **ElectronJS** demo
+- Refactor **draw** methods into separate modules and fix coloring function
+- Add highlights to attention keypoints (usage sketch below),
+  enabled when both points and attention draw are enabled:
+  `human.draw.options.drawAttention = true` and `human.draw.options.drawPoints = true`
+- Add **ElectronJS** demo:
   see <https://github.com/vladmandic/human-electron>
+- Enhanced **3D** demos:
+  see <https://github.com/vladmandic/human-motion>
 - Update build platform and dependencies
 - Update **TFJS**
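
A minimal usage sketch for the attention highlights described above, using the config shape seen in the demos in this commit; the npm package name is an assumption (the demos import from `../../dist/human.esm.js` instead):

```ts
import { Human } from '@vladmandic/human'; // assumption: published package name

// the face attention model ships separately via the human-models repository
const human = new Human({ face: { enabled: true, mesh: { enabled: true }, attention: { enabled: true } } });
human.draw.options.drawPoints = true;    // draw all mesh keypoints
human.draw.options.drawAttention = true; // highlight attention keypoints (lips, eyes)

async function frame(video: HTMLVideoElement, canvas: HTMLCanvasElement) {
  const result = await human.detect(video); // run detection on the current frame
  await human.draw.canvas(video, canvas);   // copy the frame to the output canvas
  await human.draw.all(canvas, result);     // overlay results, including the highlights
}
```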


@@ -4,329 +4,8 @@
 author: <https://github.com/vladmandic>'
 */
import{Human as H}from"../../dist/human.esm.js";var d,R="human",m="person",g=(...t)=>console.log("indexdb",...t);async function b(){return d?!0:new Promise(t=>{let i=indexedDB.open(R,1);i.onerror=s=>g("error:",s),i.onupgradeneeded=s=>{g("create:",s.target),d=s.target.result,d.createObjectStore(m,{keyPath:"id",autoIncrement:!0})},i.onsuccess=s=>{d=s.target.result,g("open:",d),t(!0)}})}async function C(){let t=[];return d||await b(),new Promise(i=>{let s=d.transaction([m],"readwrite").objectStore(m).openCursor(null,"next");s.onerror=o=>g("load error:",o),s.onsuccess=o=>{o.target.result?(t.push(o.target.result.value),o.target.result.continue()):i(t)}})}async function k(){return d||await b(),new Promise(t=>{let i=d.transaction([m],"readwrite").objectStore(m).count();i.onerror=s=>g("count error:",s),i.onsuccess=()=>t(i.result)})}async function x(t){d||await b();let i={name:t.name,descriptor:t.descriptor,image:t.image};d.transaction([m],"readwrite").objectStore(m).put(i),g("save:",i)}async function D(t){d||await b(),d.transaction([m],"readwrite").objectStore(m).delete(t.id),g("delete:",t)}var v={modelBasePath:"../../models",filter:{equalization:!0},face:{enabled:!0,detector:{rotation:!0,return:!0,cropFactor:1.6,mask:!1},description:{enabled:!0},mobilefacenet:{enabled:!1,modelPath:"https://vladmandic.github.io/human-models/models/mobilefacenet.json"},iris:{enabled:!0},emotion:{enabled:!1},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1},hand:{enabled:!1},object:{enabled:!1},gesture:{enabled:!0}},I={order:2,multiplier:25,min:.2,max:.8},c={minConfidence:.6,minSize:224,maxTime:1e4,blinkMin:10,blinkMax:800,threshold:.5,mask:v.face.detector.mask,rotation:v.face.detector.rotation,cropFactor:v.face.detector.cropFactor,...I},n={faceCount:!1,faceConfidence:!1,facingCenter:!1,lookingCenter:!1,blinkDetected:!1,faceSize:!1,antispoofCheck:!1,livenessCheck:!1,elapsedMs:0},M=()=>n.faceCount&&n.faceSize&&n.blinkDetected&&n.facingCenter&&n.lookingCenter&&n.faceConfidence&&n.antispoofCheck&&n.livenessCheck,r={face:null,record:null},l={start:0,end:0,time:0},a=new H(v);a.env.perfadd=!1;a.draw.options.font='small-caps 18px "Lato"';a.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("fps"),match:document.getElementById("match"),name:document.getElementById("name"),save:document.getElementById("save"),delete:document.getElementById("delete"),retry:document.getElementById("retry"),source:document.getElementById("source"),ok:document.getElementById("ok")},h={detect:0,draw:0},y={detect:0,draw:0},E=0,p=(...t)=>{e.log.innerText+=t.join(" ")+`
`,console.log(...t)},w=t=>e.fps.innerText=t;async function S(){w("starting webcam...");let t={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},i=await navigator.mediaDevices.getUserMedia(t),s=new Promise(o=>{e.video.onloadeddata=()=>o(!0)});e.video.srcObject=i,e.video.play(),await s,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight,a.env.initial&&p("video:",e.video.videoWidth,e.video.videoHeight,"|",i.getVideoTracks()[0].label),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function T(){if(!e.video.paused){r.face&&r.face.tensor&&a.tf.dispose(r.face.tensor),await a.detect(e.video);let t=a.now();y.detect=1e3/(t-h.detect),h.detect=t,requestAnimationFrame(T)}}async function L(){let t=await a.next(a.result);await a.draw.canvas(e.video,e.canvas),await a.draw.all(e.canvas,t);let i=a.now();if(y.draw=1e3/(i-h.draw),h.draw=i,w(`fps: ${y.detect.toFixed(1).padStart(5," ")} detect | ${y.draw.toFixed(1).padStart(5," ")} draw`),n.faceCount=a.result.face.length===1,n.faceCount){let o=Object.values(a.result.gesture).map(f=>f.gesture);(o.includes("blink left eye")||o.includes("blink right eye"))&&(l.start=a.now()),l.start>0&&!o.includes("blink left eye")&&!o.includes("blink right eye")&&(l.end=a.now()),n.blinkDetected=n.blinkDetected||Math.abs(l.end-l.start)>c.blinkMin&&Math.abs(l.end-l.start)<c.blinkMax,n.blinkDetected&&l.time===0&&(l.time=Math.trunc(l.end-l.start)),n.facingCenter=o.includes("facing center"),n.lookingCenter=o.includes("looking center"),n.faceConfidence=(a.result.face[0].boxScore||0)>c.minConfidence&&(a.result.face[0].faceScore||0)>c.minConfidence&&(a.result.face[0].genderScore||0)>c.minConfidence,n.antispoofCheck=(a.result.face[0].real||0)>c.minConfidence,n.livenessCheck=(a.result.face[0].live||0)>c.minConfidence,n.faceSize=a.result.face[0].box[2]>=c.minSize&&a.result.face[0].box[3]>=c.minSize}let s=32;for(let[o,f]of Object.entries(n)){let u=document.getElementById(`ok-${o}`);u||(u=document.createElement("div"),u.innerText=o,u.className="ok",u.style.top=`${s}px`,e.ok.appendChild(u)),typeof f=="boolean"?u.style.backgroundColor=f?"lightgreen":"lightcoral":u.innerText=`${o}:${f}`,s+=28}return M()||n.elapsedMs>c.maxTime?(e.video.pause(),a.result.face[0]):(n.elapsedMs=Math.trunc(a.now()-E),new Promise(o=>{setTimeout(async()=>{await L()&&o(a.result.face[0])},30)}))}async function P(){var t,i;if(e.name.value.length>0){let s=(t=e.canvas.getContext("2d"))==null?void 0:t.getImageData(0,0,e.canvas.width,e.canvas.height),o={id:0,name:e.name.value,descriptor:(i=r.face)==null?void 0:i.embedding,image:s};await x(o),p("saved face record:",o.name)}else p("invalid name")}async function z(){r.record&&r.record.id>0&&await D(r.record)}async function j(){var o,f;if((o=e.canvas.getContext("2d"))==null||o.clearRect(0,0,c.minSize,c.minSize),!r.face||!r.face.tensor||!r.face.embedding)return!1;if(console.log("face record:",r.face),a.tf.browser.toPixels(r.face.tensor,e.canvas),await k()===0)return p("face database is empty"),document.body.style.background="black",e.delete.style.display="none",!1;let t=await C(),i=t.map(u=>u.descriptor),s=await a.match(r.face.embedding,i,I);return r.record=t[s.index]||null,r.record&&(p(`best match: ${r.record.name} | id: ${r.record.id} | similarity: 
${Math.round(1e3*s.similarity)/10}%`),e.name.value=r.record.name,e.source.style.display="",(f=e.source.getContext("2d"))==null||f.putImageData(r.record.image,0,0)),document.body.style.background=s.similarity>c.threshold?"darkgreen":"maroon",s.similarity>c.threshold}async function B(){var t,i,s,o;return n.faceCount=!1,n.faceConfidence=!1,n.facingCenter=!1,n.blinkDetected=!1,n.faceSize=!1,n.antispoofCheck=!1,n.livenessCheck=!1,n.elapsedMs=0,e.match.style.display="none",e.retry.style.display="none",e.source.style.display="none",document.body.style.background="black",await S(),await T(),E=a.now(),r.face=await L(),e.canvas.width=((i=(t=r.face)==null?void 0:t.tensor)==null?void 0:i.shape[1])||c.minSize,e.canvas.height=((o=(s=r.face)==null?void 0:s.tensor)==null?void 0:o.shape[0])||c.minSize,e.source.width=e.canvas.width,e.source.height=e.canvas.height,e.canvas.style.width="",e.match.style.display="flex",e.save.style.display="flex",e.delete.style.display="flex",e.retry.style.display="block",M()?j():(p("did not find valid face"),!1)}async function q(){p("human version:",a.version,"| tfjs version:",a.tf.version["tfjs-core"]),p("options:",JSON.stringify(c).replace(/{|}|"|\[|\]/g,"").replace(/,/g," ")),w("loading..."),p("known face records:",await k()),await S(),await a.load(),w("initializing..."),e.retry.addEventListener("click",B),e.save.addEventListener("click",P),e.delete.addEventListener("click",z),await a.warmup(),await B()}window.onload=q;

// demo/faceid/index.ts
import { Human } from "../../dist/human.esm.js";
// demo/faceid/indexdb.ts
var db;
var database = "human";
var table = "person";
var log = (...msg) => console.log("indexdb", ...msg);
async function open() {
if (db)
return true;
return new Promise((resolve) => {
const request = indexedDB.open(database, 1);
request.onerror = (evt) => log("error:", evt);
request.onupgradeneeded = (evt) => {
log("create:", evt.target);
db = evt.target.result;
db.createObjectStore(table, { keyPath: "id", autoIncrement: true });
};
request.onsuccess = (evt) => {
db = evt.target.result;
log("open:", db);
resolve(true);
};
});
}
async function load() {
const faceDB = [];
if (!db)
await open();
return new Promise((resolve) => {
const cursor = db.transaction([table], "readwrite").objectStore(table).openCursor(null, "next");
cursor.onerror = (evt) => log("load error:", evt);
cursor.onsuccess = (evt) => {
if (evt.target.result) {
faceDB.push(evt.target.result.value);
evt.target.result.continue();
} else {
resolve(faceDB);
}
};
});
}
async function count() {
if (!db)
await open();
return new Promise((resolve) => {
const store = db.transaction([table], "readwrite").objectStore(table).count();
store.onerror = (evt) => log("count error:", evt);
store.onsuccess = () => resolve(store.result);
});
}
async function save(faceRecord) {
if (!db)
await open();
const newRecord = { name: faceRecord.name, descriptor: faceRecord.descriptor, image: faceRecord.image };
db.transaction([table], "readwrite").objectStore(table).put(newRecord);
log("save:", newRecord);
}
async function remove(faceRecord) {
if (!db)
await open();
db.transaction([table], "readwrite").objectStore(table).delete(faceRecord.id);
log("delete:", faceRecord);
}
// demo/faceid/index.ts
var humanConfig = {
modelBasePath: "../../models",
filter: { equalization: true },
face: {
enabled: true,
detector: { rotation: true, return: true, cropFactor: 1.6, mask: false },
description: { enabled: true },
mobilefacenet: { enabled: false, modelPath: "https://vladmandic.github.io/human-models/models/mobilefacenet.json" },
iris: { enabled: true },
emotion: { enabled: false },
antispoof: { enabled: true },
liveness: { enabled: true }
},
body: { enabled: false },
hand: { enabled: false },
object: { enabled: false },
gesture: { enabled: true }
};
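// tuning for descriptor matching, passed to human.match() below; order/multiplier/min/max shape how raw descriptor distance maps to a similarity score (exact semantics per human's face-matching docs)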
var matchOptions = { order: 2, multiplier: 25, min: 0.2, max: 0.8 };
var options = {
minConfidence: 0.6,
minSize: 224,
maxTime: 1e4,
blinkMin: 10,
blinkMax: 800,
threshold: 0.5,
mask: humanConfig.face.detector.mask,
rotation: humanConfig.face.detector.rotation,
cropFactor: humanConfig.face.detector.cropFactor,
...matchOptions
};
var ok = {
faceCount: false,
faceConfidence: false,
facingCenter: false,
lookingCenter: false,
blinkDetected: false,
faceSize: false,
antispoofCheck: false,
livenessCheck: false,
elapsedMs: 0
};
var allOk = () => ok.faceCount && ok.faceSize && ok.blinkDetected && ok.facingCenter && ok.lookingCenter && ok.faceConfidence && ok.antispoofCheck && ok.livenessCheck;
var current = { face: null, record: null };
var blink = {
start: 0,
end: 0,
time: 0
};
var human = new Human(humanConfig);
human.env["perfadd"] = false;
human.draw.options.font = 'small-caps 18px "Lato"';
human.draw.options.lineHeight = 20;
var dom = {
video: document.getElementById("video"),
canvas: document.getElementById("canvas"),
log: document.getElementById("log"),
fps: document.getElementById("fps"),
match: document.getElementById("match"),
name: document.getElementById("name"),
save: document.getElementById("save"),
delete: document.getElementById("delete"),
retry: document.getElementById("retry"),
source: document.getElementById("source"),
ok: document.getElementById("ok")
};
var timestamp = { detect: 0, draw: 0 };
var fps = { detect: 0, draw: 0 };
var startTime = 0;
var log2 = (...msg) => {
dom.log.innerText += msg.join(" ") + "\n";
console.log(...msg);
};
var printFPS = (msg) => dom.fps.innerText = msg;
async function webCam() {
printFPS("starting webcam...");
const cameraOptions = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
const stream = await navigator.mediaDevices.getUserMedia(cameraOptions);
const ready = new Promise((resolve) => {
dom.video.onloadeddata = () => resolve(true);
});
dom.video.srcObject = stream;
dom.video.play();
await ready;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
if (human.env.initial)
log2("video:", dom.video.videoWidth, dom.video.videoHeight, "|", stream.getVideoTracks()[0].label);
dom.canvas.onclick = () => {
if (dom.video.paused)
dom.video.play();
else
dom.video.pause();
};
}
async function detectionLoop() {
if (!dom.video.paused) {
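// dispose the previously captured face tensor before detecting again to avoid leaking memory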
if (current.face && current.face.tensor)
human.tf.dispose(current.face.tensor);
await human.detect(dom.video);
const now = human.now();
fps.detect = 1e3 / (now - timestamp.detect);
timestamp.detect = now;
requestAnimationFrame(detectionLoop);
}
}
async function validationLoop() {
const interpolated = await human.next(human.result);
await human.draw.canvas(dom.video, dom.canvas);
await human.draw.all(dom.canvas, interpolated);
const now = human.now();
fps.draw = 1e3 / (now - timestamp.draw);
timestamp.draw = now;
printFPS(`fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
ok.faceCount = human.result.face.length === 1;
if (ok.faceCount) {
const gestures = Object.values(human.result.gesture).map((gesture) => gesture.gesture);
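// blink timing: record start while a blink gesture is reported and end once it clears;
// a blink counts only when its duration falls within [blinkMin, blinkMax] milliseconds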
if (gestures.includes("blink left eye") || gestures.includes("blink right eye"))
blink.start = human.now();
if (blink.start > 0 && !gestures.includes("blink left eye") && !gestures.includes("blink right eye"))
blink.end = human.now();
ok.blinkDetected = ok.blinkDetected || Math.abs(blink.end - blink.start) > options.blinkMin && Math.abs(blink.end - blink.start) < options.blinkMax;
if (ok.blinkDetected && blink.time === 0)
blink.time = Math.trunc(blink.end - blink.start);
ok.facingCenter = gestures.includes("facing center");
ok.lookingCenter = gestures.includes("looking center");
ok.faceConfidence = (human.result.face[0].boxScore || 0) > options.minConfidence && (human.result.face[0].faceScore || 0) > options.minConfidence && (human.result.face[0].genderScore || 0) > options.minConfidence;
ok.antispoofCheck = (human.result.face[0].real || 0) > options.minConfidence;
ok.livenessCheck = (human.result.face[0].live || 0) > options.minConfidence;
ok.faceSize = human.result.face[0].box[2] >= options.minSize && human.result.face[0].box[3] >= options.minSize;
}
let y = 32;
for (const [key, val] of Object.entries(ok)) {
let el = document.getElementById(`ok-${key}`);
if (!el) {
el = document.createElement("div");
el.innerText = key;
el.className = "ok";
el.style.top = `${y}px`;
dom.ok.appendChild(el);
}
if (typeof val === "boolean")
el.style.backgroundColor = val ? "lightgreen" : "lightcoral";
else
el.innerText = `${key}:${val}`;
y += 28;
}
if (allOk()) {
dom.video.pause();
return human.result.face[0];
}
if (ok.elapsedMs > options.maxTime) {
dom.video.pause();
return human.result.face[0];
} else {
ok.elapsedMs = Math.trunc(human.now() - startTime);
return new Promise((resolve) => {
setTimeout(async () => {
const res = await validationLoop();
if (res)
resolve(human.result.face[0]);
}, 30);
});
}
}
async function saveRecords() {
var _a, _b;
if (dom.name.value.length > 0) {
const image = (_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.getImageData(0, 0, dom.canvas.width, dom.canvas.height);
const rec = { id: 0, name: dom.name.value, descriptor: (_b = current.face) == null ? void 0 : _b.embedding, image };
await save(rec);
log2("saved face record:", rec.name);
} else {
log2("invalid name");
}
}
async function deleteRecord() {
if (current.record && current.record.id > 0) {
await remove(current.record);
}
}
async function detectFace() {
var _a, _b;
(_a = dom.canvas.getContext("2d")) == null ? void 0 : _a.clearRect(0, 0, options.minSize, options.minSize);
if (!current.face || !current.face.tensor || !current.face.embedding)
return false;
console.log("face record:", current.face);
human.tf.browser.toPixels(current.face.tensor, dom.canvas);
if (await count() === 0) {
log2("face database is empty");
document.body.style.background = "black";
dom.delete.style.display = "none";
return false;
}
const db2 = await load();
const descriptors = db2.map((rec) => rec.descriptor);
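// find the closest stored descriptor to the current face embedding; match returns the best index and its similarity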
const res = await human.match(current.face.embedding, descriptors, matchOptions);
current.record = db2[res.index] || null;
if (current.record) {
log2(`best match: ${current.record.name} | id: ${current.record.id} | similarity: ${Math.round(1e3 * res.similarity) / 10}%`);
dom.name.value = current.record.name;
dom.source.style.display = "";
(_b = dom.source.getContext("2d")) == null ? void 0 : _b.putImageData(current.record.image, 0, 0);
}
document.body.style.background = res.similarity > options.threshold ? "darkgreen" : "maroon";
return res.similarity > options.threshold;
}
async function main() {
var _a, _b, _c, _d;
ok.faceCount = false;
ok.faceConfidence = false;
ok.facingCenter = false;
ok.blinkDetected = false;
ok.faceSize = false;
ok.antispoofCheck = false;
ok.livenessCheck = false;
ok.elapsedMs = 0;
dom.match.style.display = "none";
dom.retry.style.display = "none";
dom.source.style.display = "none";
document.body.style.background = "black";
await webCam();
await detectionLoop();
startTime = human.now();
current.face = await validationLoop();
dom.canvas.width = ((_b = (_a = current.face) == null ? void 0 : _a.tensor) == null ? void 0 : _b.shape[1]) || options.minSize;
dom.canvas.height = ((_d = (_c = current.face) == null ? void 0 : _c.tensor) == null ? void 0 : _d.shape[0]) || options.minSize;
dom.source.width = dom.canvas.width;
dom.source.height = dom.canvas.height;
dom.canvas.style.width = "";
dom.match.style.display = "flex";
dom.save.style.display = "flex";
dom.delete.style.display = "flex";
dom.retry.style.display = "block";
if (!allOk()) {
log2("did not find valid face");
return false;
} else {
return detectFace();
}
}
async function init() {
log2("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]);
log2("options:", JSON.stringify(options).replace(/{|}|"|\[|\]/g, "").replace(/,/g, " "));
printFPS("loading...");
log2("known face records:", await count());
await webCam();
await human.load();
printFPS("initializing...");
dom.retry.addEventListener("click", main);
dom.save.addEventListener("click", saveRecords);
dom.delete.addEventListener("click", deleteRecord);
await human.warmup();
await main();
}
window.onload = init;
/**
 * Human demo for browsers
 * @default Human Library

File diff suppressed because one or more lines are too long


@@ -4,100 +4,8 @@
 author: <https://github.com/vladmandic>'
 */
import{Human as p}from"../../dist/human.esm.js";var w={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},gesture:{enabled:!0}},t=new p(w);t.env.perfadd=!1;t.draw.options.font='small-caps 18px "Lato"';t.draw.options.lineHeight=20;var e={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},i={detect:0,draw:0,tensors:0},d={detect:0,draw:0},s=(...a)=>{e.log.innerText+=a.join(" ")+`
`,console.log(...a)},r=a=>e.fps.innerText=a,b=a=>e.perf.innerText="tensors:"+t.tf.memory().numTensors+" | performance: "+JSON.stringify(a).replace(/"|{|}/g,"").replace(/,/g," | ");async function h(){r("starting webcam...");let a={audio:!1,video:{facingMode:"user",resizeMode:"none",width:{ideal:document.body.clientWidth}}},n=await navigator.mediaDevices.getUserMedia(a),m=new Promise(f=>{e.video.onloadeddata=()=>f(!0)});e.video.srcObject=n,e.video.play(),await m,e.canvas.width=e.video.videoWidth,e.canvas.height=e.video.videoHeight;let o=n.getVideoTracks()[0],v=o.getCapabilities?o.getCapabilities():"",g=o.getSettings?o.getSettings():"",u=o.getConstraints?o.getConstraints():"";s("video:",e.video.videoWidth,e.video.videoHeight,o.label,{stream:n,track:o,settings:g,constraints:u,capabilities:v}),e.canvas.onclick=()=>{e.video.paused?e.video.play():e.video.pause()}}async function c(){if(!e.video.paused){await t.detect(e.video);let n=t.tf.memory().numTensors;n-i.tensors!==0&&s("allocated tensors:",n-i.tensors),i.tensors=n}let a=t.now();d.detect=1e3/(a-i.detect),i.detect=a,requestAnimationFrame(c)}async function l(){if(!e.video.paused){let n=await t.next(t.result);await t.draw.canvas(e.video,e.canvas),await t.draw.all(e.canvas,n),b(n.performance)}let a=t.now();d.draw=1e3/(a-i.draw),i.draw=a,r(e.video.paused?"paused":`fps: ${d.detect.toFixed(1).padStart(5," ")} detect | ${d.draw.toFixed(1).padStart(5," ")} draw`),setTimeout(l,30)}async function y(){s("human version:",t.version,"| tfjs version:",t.tf.version["tfjs-core"]),s("platform:",t.env.platform,"| agent:",t.env.agent),r("loading..."),await t.load(),s("backend:",t.tf.getBackend(),"| available:",t.env.backends),s("loaded models:",Object.values(t.models).filter(a=>a!==null).length),r("initializing..."),await t.warmup(),await h(),await c(),await l()}window.onload=y;

// demo/typescript/index.ts
import { Human } from "../../dist/human.esm.js";
var humanConfig = {
modelBasePath: "../../models",
filter: { enabled: true, equalization: false },
face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true } },
body: { enabled: true },
hand: { enabled: true },
object: { enabled: false },
gesture: { enabled: true }
};
var human = new Human(humanConfig);
human.env["perfadd"] = false;
human.draw.options.font = 'small-caps 18px "Lato"';
human.draw.options.lineHeight = 20;
var dom = {
video: document.getElementById("video"),
canvas: document.getElementById("canvas"),
log: document.getElementById("log"),
fps: document.getElementById("status"),
perf: document.getElementById("performance")
};
var timestamp = { detect: 0, draw: 0, tensors: 0 };
var fps = { detect: 0, draw: 0 };
var log = (...msg) => {
dom.log.innerText += msg.join(" ") + "\n";
console.log(...msg);
};
var status = (msg) => dom.fps.innerText = msg;
var perf = (msg) => dom.perf.innerText = "tensors:" + human.tf.memory().numTensors + " | performance: " + JSON.stringify(msg).replace(/"|{|}/g, "").replace(/,/g, " | ");
async function webCam() {
status("starting webcam...");
const options = { audio: false, video: { facingMode: "user", resizeMode: "none", width: { ideal: document.body.clientWidth } } };
const stream = await navigator.mediaDevices.getUserMedia(options);
const ready = new Promise((resolve) => {
dom.video.onloadeddata = () => resolve(true);
});
dom.video.srcObject = stream;
dom.video.play();
await ready;
dom.canvas.width = dom.video.videoWidth;
dom.canvas.height = dom.video.videoHeight;
const track = stream.getVideoTracks()[0];
const capabilities = track.getCapabilities ? track.getCapabilities() : "";
const settings = track.getSettings ? track.getSettings() : "";
const constraints = track.getConstraints ? track.getConstraints() : "";
log("video:", dom.video.videoWidth, dom.video.videoHeight, track.label, { stream, track, settings, constraints, capabilities });
dom.canvas.onclick = () => {
if (dom.video.paused)
dom.video.play();
else
dom.video.pause();
};
}
async function detectionLoop() {
if (!dom.video.paused) {
await human.detect(dom.video);
const tensors = human.tf.memory().numTensors;
if (tensors - timestamp.tensors !== 0)
log("allocated tensors:", tensors - timestamp.tensors);
timestamp.tensors = tensors;
}
const now = human.now();
fps.detect = 1e3 / (now - timestamp.detect);
timestamp.detect = now;
requestAnimationFrame(detectionLoop);
}
async function drawLoop() {
if (!dom.video.paused) {
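// interpolate the last detection results for smooth drawing, decoupled from the slower detection loop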
const interpolated = await human.next(human.result);
await human.draw.canvas(dom.video, dom.canvas);
await human.draw.all(dom.canvas, interpolated);
perf(interpolated.performance);
}
const now = human.now();
fps.draw = 1e3 / (now - timestamp.draw);
timestamp.draw = now;
status(dom.video.paused ? "paused" : `fps: ${fps.detect.toFixed(1).padStart(5, " ")} detect | ${fps.draw.toFixed(1).padStart(5, " ")} draw`);
setTimeout(drawLoop, 30);
}
async function main() {
log("human version:", human.version, "| tfjs version:", human.tf.version["tfjs-core"]);
log("platform:", human.env.platform, "| agent:", human.env.agent);
status("loading...");
await human.load();
log("backend:", human.tf.getBackend(), "| available:", human.env.backends);
log("loaded models:", Object.values(human.models).filter((model) => model !== null).length);
status("initializing...");
await human.warmup();
await webCam();
await detectionLoop();
await drawLoop();
}
window.onload = main;
/**
 * Human demo for browsers
 * @default Human Library

File diff suppressed because one or more lines are too long


@@ -2,6 +2,7 @@ import { TRI468 as triangulation } from '../face/facemeshcoords';
 import { mergeDeep } from '../util/util';
 import { getCanvasContext, rad2deg, rect, point, lines, arrow } from './primitives';
 import { options } from './options';
+import { attentionDefinitions } from '../face/attention';
 import type { FaceResult } from '../result';
 import type { AnyCanvas, DrawOptions } from '../exports';
@@ -122,12 +123,15 @@ function drawFacePolygons(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {
 }
 function drawFacePoints(f: FaceResult, ctx: CanvasRenderingContext2D | OffscreenCanvasRenderingContext2D) {
-  const length = Math.max(468, f.mesh.length);
   if (opt.drawPoints && f.mesh.length >= 468) {
-    for (let i = 0; i < length; i++) point(ctx, f.mesh[i][0], f.mesh[i][1], f.mesh[i][2], opt);
-  }
-  if (opt.drawAttention && f.mesh.length > 468) {
-    for (let i = 468; i < f.mesh.length; i++) point(ctx, f.mesh[i][0], f.mesh[i][1], f.mesh[i][2], opt);
+    for (let i = 0; i < f.mesh.length; i++) {
+      point(ctx, f.mesh[i][0], f.mesh[i][1], f.mesh[i][2], opt);
+      if (opt.drawAttention) {
+        if (attentionDefinitions.lips.includes(i)) point(ctx, f.mesh[i][0], f.mesh[i][1], (f.mesh[i][2] as number) + 127, opt);
+        if (attentionDefinitions.eyeL.includes(i)) point(ctx, f.mesh[i][0], f.mesh[i][1], (f.mesh[i][2] as number) - 127, opt);
+        if (attentionDefinitions.eyeR.includes(i)) point(ctx, f.mesh[i][0], f.mesh[i][1], (f.mesh[i][2] as number) - 127, opt);
+      }
+    }
   }
 }
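
The ±127 z-offsets above only produce visible highlights because the point primitive can derive its color from the keypoint's depth (a `useDepth`-style draw option). A sketch of such a depth-to-color mapping, as an illustrative assumption rather than the actual `primitives` code:

```ts
// illustrative sketch only: assumes point color is derived from z
// the way a depth-aware renderer would do it
function depthColor(z: number): string {
  const d = Math.max(0, Math.min(255, 127 + z)); // shift depth into 0..255
  return `hsl(${Math.round((d * 240) / 255)}, 100%, 50%)`; // eyes (z-127) -> red, neutral mesh -> green, lips (z+127) -> blue
}

console.log(depthColor(-127), depthColor(0), depthColor(127)); // three clearly distinct hues
```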


@@ -1,21 +1,36 @@
 import type { Tensor } from '../tfjs/types';
-const attentionDefinitions = {
+export const attentionDefinitions = {
   eyeLLower: [33, 7, 163, 144, 145, 153, 154, 155, 133], // 9
   eyeRLower: [263, 249, 390, 373, 374, 380, 381, 382, 362], // 9
   // eslint-disable-next-line max-len
-  lips: [61, 76, 91, 181, 84, 17, 314, 405, 321, 291, 291, 185, 40, 39, 37, 0, 267, 269, 270, 291, 62, 183, 88, 178, 87, 14, 268, 303, 304, 408, 291, 184, 42, 178, 87, 14, 268, 303, 304, 408, 61, 62, 90, 180, 85, 16, 315, 404, 307, 308, 291, 185, 40, 73, 72, 0, 302, 269, 270, 409, 61, 184, 95, 179, 86, 15, 316, 403, 324, 408, 291, 184, 74, 41, 38, 11, 268, 303, 304, 408],
+  lips: [185, 96, 90, 181, 84, 17, 314, 405, 320, 307, 409, 40, 39, 73, 37, 0, 267, 269, 270, 409, 40, 88, 178, 178, 87, 14, 268, 402, 318, 324, 409, 80, 41, 38, 87, 12, 268, 303, 318, 324, 185, 95, 80, 81, 85, 16, 315, 404, 319, 325, 409, 40, 39, 73, 72, 0, 302, 303, 270, 408, 185, 88, 88, 81, 82, 15, 316, 403, 319, 324, 409, 80, 41, 38, 87, 12, 268, 303, 318, 324],
   // eslint-disable-next-line max-len
   eyeL: [33, 7, 163, 144, 145, 153, 154, 155, 133, 246, 161, 160, 159, 158, 157, 173, 130, 25, 110, 24, 23, 22, 26, 112, 243, 247, 30, 29, 27, 28, 56, 190, 226, 31, 228, 229, 230, 231, 232, 233, 244, 113, 225, 224, 223, 222, 221, 189, 35, 124, 46, 53, 52, 65, 143, 111, 117, 118, 119, 120, 121, 128, 245, 156, 70, 63, 105, 66, 107, 55, 193],
   // eslint-disable-next-line max-len
   eyeR: [263, 249, 390, 373, 374, 380, 381, 382, 362, 466, 388, 387, 386, 385, 384, 398, 359, 255, 339, 254, 253, 252, 256, 341, 463, 467, 260, 259, 257, 258, 286, 414, 446, 261, 448, 449, 450, 451, 452, 453, 464, 342, 445, 444, 443, 442, 441, 413, 265, 353, 276, 283, 282, 295, 372, 340, 346, 347, 348, 349, 350, 357, 465, 383, 300, 293, 334, 296, 336, 285, 417],
 };
+/*
 // function used to determine heuristic mapping
 // values in attentionDefinitions are based on top values from 200 iterations
-/*
+const historyRes: number[][] = [];
+const getMostCommon = (arr): number => {
+  const count: Record<string, number> = {};
+  arr.forEach((el) => {
+    count[el] = (count[el] || 0) + 1;
+  });
+  const res: string[] = Object.keys(count).reduce((acc: string[], val, ind) => {
+    if (!ind || count[val] > count[acc[0]]) return [val];
+    if (count[val] === count[acc[0]]) acc.push(val);
+    return acc;
+  }, []);
+  return parseInt(res[0]);
+};
 function replaceClosestPoint(rawCoords, newCoords) {
-  const res: number[] = [];
+  const currentRes: number[] = [];
   for (let i = 0; i < newCoords.length / 2; i++) {
     let minDist = Number.MAX_VALUE;
     let minDistIdx = -1;
@@ -29,9 +44,19 @@ function replaceClosestPoint(rawCoords, newCoords) {
         minDistIdx = j;
       }
     }
-    res.push(minDistIdx);
+    currentRes.push(minDistIdx);
     rawCoords[minDistIdx] = [newCoords[2 * i + 0], newCoords[2 * i + 1], rawCoords[minDistIdx][2]];
   }
+  historyRes.push(currentRes);
+  if (historyRes.length % 50 === 0) {
+    const invertRes: number[][] = [];
+    for (let i = 0; i < currentRes.length; i++) {
+      const indexRes = historyRes.map((j) => j[i]);
+      invertRes.push(indexRes);
+    }
+    const mostCommon: number[] = invertRes.map((r) => getMostCommon(r));
+    console.log(mostCommon);
+  }
   return rawCoords;
 }
 */
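
The commented-out calibration above records, per refined keypoint, which mesh index it snapped to on each iteration, then keeps the most frequent one. A standalone sketch of that mode-per-column step, with sample history invented for illustration:

```ts
// mode-per-index over a history of matched mesh indices, as in the calibration above;
// the sample data below is invented for illustration
const history: number[][] = [
  [61, 76, 91], // iteration 1: mesh index matched by each of 3 refined keypoints
  [61, 75, 91], // iteration 2
  [61, 76, 91], // iteration 3
];
const mostCommon: number[] = history[0].map((_, col) => {
  const counts = new Map<number, number>();
  for (const row of history) counts.set(row[col], (counts.get(row[col]) || 0) + 1);
  return [...counts.entries()].sort((a, b) => b[1] - a[1])[0][0]; // highest count wins
});
console.log(mostCommon); // [61, 76, 91] -> candidate values for attentionDefinitions
```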
@@ -64,7 +89,7 @@ export async function augment(rawCoords, results: Tensor[]) {
   // for (let i = 0; i < t.eyeR.length / 2; i++) rawCoords.push([t.eyeR[2 * i + 0], t.eyeR[2 * i + 1], 0]);
   // augment lips: replaces eye keypoints based on heuristic mapping
-  // for (let i = 0; i < t.lips.length / 2; i++) rawCoords[attentionDefinitions.lips[i]] = [t.lips[2 * i + 0], t.lips[2 * i + 1], rawCoords[attentionDefinitions.lips[i]][2]];
+  for (let i = 0; i < t.lips.length / 2; i++) rawCoords[attentionDefinitions.lips[i]] = [t.lips[2 * i + 0], t.lips[2 * i + 1], rawCoords[attentionDefinitions.lips[i]][2]];
   // for (let i = 0; i < t.lips.length / 2; i++) rawCoords.push([t.lips[2 * i + 0], t.lips[2 * i + 1], 0]);
   return rawCoords;

File diff suppressed because it is too large

File diff suppressed because it is too large