mirror of https://github.com/vladmandic/human
add optional crop to multiple models
parent adbab08203
commit e30d072ebf
CHANGELOG.md

@@ -9,7 +9,7 @@
 
 ## Changelog
 
-### **HEAD -> main** 2023/02/25 mandic00@live.com
+### **HEAD -> main** 2023/02/28 mandic00@live.com
 
 - add electron detection
 - fix gender-ssrnet-imdb
|
3
TODO.md
3
TODO.md
@@ -46,4 +46,5 @@ No support for running in **web workers** as Safari still does not support `Offs
 - add `electron` detection
 - fix `gender-ssrnet-imdb`
 - add `movenet-multipose` workaround
 - fix `movenet-multipose`
+- add optional `crop` values for *emotion*, *description*, *ssrnet* and *gear* models
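For context, a minimal usage sketch of the new option (not part of this commit; the shape is inferred from the `config.face.*?.['crop']` reads in the source hunks below, and the value `0.15` is illustrative). `crop` is a fractional inset applied to each edge of the face tensor before it is resized to the model's input size; `0` or absent keeps the previous behavior:

```ts
import * as H from '@vladmandic/human';

// crop is read as an untyped extra property, hence the cast
const humanConfig = {
  face: {
    enabled: true,
    emotion: { enabled: true, crop: 0.15 }, // trim 15% from each edge
    description: { enabled: true, crop: 0.15 },
    // the ssrnet and gear modules read the same optional value
  },
} as Partial<H.Config>;

const human = new H.Human(humanConfig);
```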
demo/typescript/index.js

@@ -4,6 +4,6 @@
author: <https://github.com/vladmandic>'
*/
import*as m from"../../dist/human.esm.js";var v=1920,b={modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!0},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!0},hand:{enabled:!0},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;e.draw.options.drawPoints=!0;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},r=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var d,i,c;if(!a.video.paused){let l=e.next(e.result),p=await e.image(a.video);e.draw.canvas(p.canvas,a.canvas);let w={bodyLabels:`person confidence [score] and ${(c=(i=(d=e.result)==null?void 0:d.body)==null?void 0:i[0])==null?void 0:c.keypoints.length} keypoints`};await e.draw.all(a.canvas,l,w),g(l.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,r(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function y(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!1,width:v,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function h(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),r("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",e.models.loaded()),o("environment",e.env),r("initializing..."),await e.warmup(),await y(),await f(),await u()}window.onload=h;
import*as m from"../../dist/human.esm.js";var v=1920,b={debug:!0,backend:"webgl",modelBasePath:"../../models",filter:{enabled:!0,equalization:!1,flip:!1},face:{enabled:!0,detector:{rotation:!1},mesh:{enabled:!0},attention:{enabled:!1},iris:{enabled:!0},description:{enabled:!0},emotion:{enabled:!0},antispoof:{enabled:!0},liveness:{enabled:!0}},body:{enabled:!1,modelPath:"https://vladmandic.github.io/human-models/models/movenet-multipose.json",minConfidence:-1},hand:{enabled:!1},object:{enabled:!1},segmentation:{enabled:!1},gesture:{enabled:!0}},e=new m.Human(b);e.env.perfadd=!1;e.draw.options.font='small-caps 18px "Lato"';e.draw.options.lineHeight=20;e.draw.options.drawPoints=!0;var a={video:document.getElementById("video"),canvas:document.getElementById("canvas"),log:document.getElementById("log"),fps:document.getElementById("status"),perf:document.getElementById("performance")},n={detect:0,draw:0,tensors:0,start:0},s={detectFPS:0,drawFPS:0,frames:0,averageMs:0},o=(...t)=>{a.log.innerText+=t.join(" ")+`
`,console.log(...t)},i=t=>a.fps.innerText=t,g=t=>a.perf.innerText="tensors:"+e.tf.memory().numTensors.toString()+" | performance: "+JSON.stringify(t).replace(/"|{|}/g,"").replace(/,/g," | ");async function f(){if(!a.video.paused){n.start===0&&(n.start=e.now()),await e.detect(a.video);let t=e.tf.memory().numTensors;t-n.tensors!==0&&o("allocated tensors:",t-n.tensors),n.tensors=t,s.detectFPS=Math.round(1e3*1e3/(e.now()-n.detect))/1e3,s.frames++,s.averageMs=Math.round(1e3*(e.now()-n.start)/s.frames)/1e3,s.frames%100===0&&!a.video.paused&&o("performance",{...s,tensors:n.tensors})}n.detect=e.now(),requestAnimationFrame(f)}async function u(){var d,r,l;if(!a.video.paused){let c=e.next(e.result),p=await e.image(a.video);e.draw.canvas(p.canvas,a.canvas);let w={bodyLabels:`person confidence [score] and ${(l=(r=(d=e.result)==null?void 0:d.body)==null?void 0:r[0])==null?void 0:l.keypoints.length} keypoints`};await e.draw.all(a.canvas,c,w),g(c.performance)}let t=e.now();s.drawFPS=Math.round(1e3*1e3/(t-n.draw))/1e3,n.draw=t,i(a.video.paused?"paused":`fps: ${s.detectFPS.toFixed(1).padStart(5," ")} detect | ${s.drawFPS.toFixed(1).padStart(5," ")} draw`),setTimeout(u,30)}async function h(){let d=(await e.webcam.enumerate())[0].deviceId;await e.webcam.start({element:a.video,crop:!1,width:v,id:d}),a.canvas.width=e.webcam.width,a.canvas.height=e.webcam.height,a.canvas.onclick=async()=>{e.webcam.paused?await e.webcam.play():e.webcam.pause()}}async function y(){o("human version:",e.version,"| tfjs version:",e.tf.version["tfjs-core"]),o("platform:",e.env.platform,"| agent:",e.env.agent),i("loading..."),await e.load(),o("backend:",e.tf.getBackend(),"| available:",e.env.backends),o("models stats:",e.models.stats()),o("models loaded:",e.models.loaded()),o("environment",e.env),i("initializing..."),await e.warmup(),await h(),await f(),await u()}window.onload=y;
//# sourceMappingURL=index.js.map
File diff suppressed because one or more lines are too long
demo/typescript/index.ts

@@ -12,14 +12,16 @@ import * as H from '../../dist/human.esm.js'; // equivalent of @vladmandic/Human
 const width = 1920; // used by webcam config as well as human maximum resolution // can be anything, but resolutions higher than 4k will disable internal optimizations
 
 const humanConfig: Partial<H.Config> = { // user configuration for human, used to fine-tune behavior
   // backend: 'webgpu',
+  debug: true,
+  backend: 'webgl',
   // cacheSensitivity: 0,
   // debug: false,
   // cacheModels: false,
   // warmup: 'none',
   modelBasePath: '../../models',
   filter: { enabled: true, equalization: false, flip: false },
-  face: { enabled: true, detector: { rotation: true }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
-  body: { enabled: true },
-  hand: { enabled: true },
+  face: { enabled: true, detector: { rotation: false }, mesh: { enabled: true }, attention: { enabled: false }, iris: { enabled: true }, description: { enabled: true }, emotion: { enabled: true }, antispoof: { enabled: true }, liveness: { enabled: true } },
+  body: { enabled: false, modelPath: 'https://vladmandic.github.io/human-models/models/movenet-multipose.json', minConfidence: -1 },
+  hand: { enabled: false },
   object: { enabled: false },
   segmentation: { enabled: false },
   gesture: { enabled: true },
File diff suppressed because one or more lines are too long
@@ -1802,7 +1802,7 @@ var C3 = Kt((wg, aI) => {
 return ly(Dr, sp), sp;
 } };
 function fe(Dr) {
-return B === "string" ? Re(Dr) : B === "boolean" ? Boolean(Dr) : Dr;
+return B === "string" ? Re(Dr) : B === "boolean" ? !!Dr : Dr;
 }
 var ve = cm(F), Ft = [], Qr = 0;
 if (_e)
@@ -2470,7 +2470,7 @@ var I3 = Kt((Ig, uI) => {
 return qi(hr, Za), Za;
 } };
 function qe(hr) {
-return se === "string" ? K(hr) : se === "boolean" ? Boolean(hr) : hr;
+return se === "string" ? K(hr) : se === "boolean" ? !!hr : hr;
 }
 var Ue = Jl(G), Wt = [], Yr = 0;
 if (nt)
@@ -14600,7 +14600,7 @@ function k5(r) {
 let { inputs: e, backend: t10, attrs: o } = r, { sparseIndices: n, sparseValues: s, defaultValue: a } = e, { outputShape: i } = o, { sliceRank: p, numUpdates: u, sliceSize: c, strides: l, outputSize: m } = S.calculateShapes(s, n, i), d = false, f = t10.bufferSync(n), h;
 switch (s.dtype) {
 case "bool": {
-let g = t10.bufferSync(s), x = Boolean(t10.data.get(a.dataId).values[0]);
+let g = t10.bufferSync(s), x = !!t10.data.get(a.dataId).values[0];
 h = Va(f, g, i, m, c, u, p, l, x, d);
 break;
 }
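The three `dist` hunks above carry no functional change: `Boolean(x)` and `!!x` coerce identically for every JavaScript value, so this is almost certainly minifier churn from the esbuild `0.17.10` to `0.17.11` bump recorded in `package.json` below. A quick check of the equivalence:

```ts
// Boolean(x) === !!x holds for any value, truthy or falsy
const samples: unknown[] = [0, 1, -0, NaN, '', 'a', null, undefined, {}, [], 10n];
for (const x of samples) console.assert(Boolean(x) === !!x, 'mismatch for', x);
```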
@@ -38915,13 +38915,13 @@ async function predict5(image, config3, idx, count2) {
 }
 skipped4 = 0;
 return new Promise(async (resolve) => {
-var _a3;
+var _a3, _b3, _c2;
 const obj = [];
 if ((_a3 = config3.face.emotion) == null ? void 0 : _a3.enabled) {
 const t10 = {};
 const inputSize10 = (model8 == null ? void 0 : model8.inputs[0].shape) ? model8.inputs[0].shape[2] : 0;
-if (config3.face.emotion["crop"] > 0) {
-const crop = config3.face.emotion["crop"];
+if (((_b3 = config3.face.emotion) == null ? void 0 : _b3["crop"]) > 0) {
+const crop = (_c2 = config3.face.emotion) == null ? void 0 : _c2["crop"];
 const box = [[crop, crop, 1 - crop, 1 - crop]];
 t10.resize = eK.cropAndResize(image, box, [0], [inputSize10, inputSize10]);
 } else {
@@ -38969,11 +38969,19 @@ async function load7(config3) {
 log("cached model:", model9["modelUrl"]);
 return model9;
 }
-function enhance(input) {
+function enhance(input, config3) {
+var _a2, _b2;
 const tensor = input.image || input.tensor || input;
 if (!(model9 == null ? void 0 : model9.inputs[0].shape))
 return tensor;
-const crop = eK.resizeBilinear(tensor, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
+let crop;
+if (((_a2 = config3.face.description) == null ? void 0 : _a2["crop"]) > 0) {
+const cropval = (_b2 = config3.face.description) == null ? void 0 : _b2["crop"];
+const box = [[cropval, cropval, 1 - cropval, 1 - cropval]];
+crop = eK.cropAndResize(tensor, box, [0], [model9.inputs[0].shape[2], model9.inputs[0].shape[1]]);
+} else {
+crop = eK.resizeBilinear(tensor, [model9.inputs[0].shape[2], model9.inputs[0].shape[1]], false);
+}
 const norm = ne(crop, constants.tf255);
 Ot(crop);
 return norm;
@@ -38998,7 +39006,7 @@ async function predict6(image, config3, idx, count2) {
 return new Promise(async (resolve) => {
 var _a3;
 if ((_a3 = config3.face.description) == null ? void 0 : _a3.enabled) {
-const enhanced = enhance(image);
+const enhanced = enhance(image, config3);
 const resT = model9 == null ? void 0 : model9.execute(enhanced);
 lastTime5 = now();
 Ot(enhanced);
@@ -39175,21 +39183,25 @@ async function predict9(image, config3, idx, count2) {
 }
 skipped8 = 0;
 return new Promise(async (resolve) => {
-var _a3, _b3;
+var _a3, _b3, _c2, _d2;
 if (!(model12 == null ? void 0 : model12.inputs[0].shape))
 return;
 const t10 = {};
-const box = [[0, 0.1, 0.9, 0.9]];
+let box = [[0, 0.1, 0.9, 0.9]];
+if (((_a3 = config3.face.gear) == null ? void 0 : _a3["crop"]) > 0) {
+const crop = (_b3 = config3.face.gear) == null ? void 0 : _b3["crop"];
+box = [[crop, crop, 1 - crop, 1 - crop]];
+}
 t10.resize = eK.cropAndResize(image, box, [0], [model12.inputs[0].shape[2], model12.inputs[0].shape[1]]);
 const obj = { age: 0, gender: "unknown", genderScore: 0, race: [] };
-if ((_a3 = config3.face.gear) == null ? void 0 : _a3.enabled)
+if ((_c2 = config3.face.gear) == null ? void 0 : _c2.enabled)
 [t10.age, t10.gender, t10.race] = model12.execute(t10.resize, ["age_output", "gender_output", "race_output"]);
 const gender2 = await t10.gender.data();
 obj.gender = gender2[0] > gender2[1] ? "male" : "female";
 obj.genderScore = Math.round(100 * (gender2[0] > gender2[1] ? gender2[0] : gender2[1])) / 100;
 const race = await t10.race.data();
 for (let i = 0; i < race.length; i++) {
-if (race[i] > (((_b3 = config3.face.gear) == null ? void 0 : _b3.minConfidence) || 0.2))
+if (race[i] > (((_d2 = config3.face.gear) == null ? void 0 : _d2.minConfidence) || 0.2))
 obj.race.push({ score: Math.round(100 * race[i]) / 100, race: raceNames[i] });
 }
 obj.race.sort((a, b) => b.score - a.score);
@@ -39234,14 +39246,20 @@ async function predict10(image, config3, idx, count2) {
 }
 skipped9 = 0;
 return new Promise(async (resolve) => {
-var _a3;
+var _a3, _b3, _c3;
 if (!(model13 == null ? void 0 : model13.inputs) || !model13.inputs[0] || !model13.inputs[0].shape)
 return;
 const t10 = {};
-t10.resize = eK.resizeBilinear(image, [model13.inputs[0].shape[2], model13.inputs[0].shape[1]], false);
+if (((_a3 = config3.face["ssrnet"]) == null ? void 0 : _a3["crop"]) > 0) {
+const crop = (_b3 = config3.face["ssrnet"]) == null ? void 0 : _b3["crop"];
+const box = [[crop, crop, 1 - crop, 1 - crop]];
+t10.resize = eK.cropAndResize(image, box, [0], [model13.inputs[0].shape[2], model13.inputs[0].shape[1]]);
+} else {
+t10.resize = eK.resizeBilinear(image, [model13.inputs[0].shape[2], model13.inputs[0].shape[1]], false);
+}
 t10.enhance = ne(t10.resize, constants.tf255);
 const obj = { age: 0 };
-if ((_a3 = config3.face["ssrnet"]) == null ? void 0 : _a3.enabled)
+if ((_c3 = config3.face["ssrnet"]) == null ? void 0 : _c3.enabled)
 t10.age = model13.execute(t10.enhance);
 if (t10.age) {
 const data = await t10.age.data();
@@ -39284,15 +39302,21 @@ async function predict11(image, config3, idx, count2) {
 }
 skipped10 = 0;
 return new Promise(async (resolve) => {
-var _a3;
+var _a3, _b3, _c3;
 if (!(model14 == null ? void 0 : model14.inputs[0].shape))
 return;
 const t10 = {};
-t10.resize = eK.resizeBilinear(image, [model14.inputs[0].shape[2], model14.inputs[0].shape[1]], false);
+if (((_a3 = config3.face["ssrnet"]) == null ? void 0 : _a3["crop"]) > 0) {
+const crop = (_b3 = config3.face["ssrnet"]) == null ? void 0 : _b3["crop"];
+const box = [[crop, crop, 1 - crop, 1 - crop]];
+t10.resize = eK.cropAndResize(image, box, [0], [model14.inputs[0].shape[2], model14.inputs[0].shape[1]]);
+} else {
+t10.resize = eK.resizeBilinear(image, [model14.inputs[0].shape[2], model14.inputs[0].shape[1]], false);
+}
 t10.enhance = Ee(() => {
-var _a4, _b3;
+var _a4, _b4;
 let normalize2;
-if (((_b3 = (_a4 = model14 == null ? void 0 : model14.inputs) == null ? void 0 : _a4[0].shape) == null ? void 0 : _b3[3]) === 1) {
+if (((_b4 = (_a4 = model14 == null ? void 0 : model14.inputs) == null ? void 0 : _a4[0].shape) == null ? void 0 : _b4[3]) === 1) {
 const [red, green, blue] = Oa(t10.resize, 3, 3);
 const redNorm = ne(red, rgb2[0]);
 const greenNorm = ne(green, rgb2[1]);

@@ -39305,7 +39329,7 @@ async function predict11(image, config3, idx, count2) {
 return normalize2;
 });
 const obj = { gender: "unknown", genderScore: 0 };
-if ((_a3 = config3.face["ssrnet"]) == null ? void 0 : _a3.enabled)
+if ((_c3 = config3.face["ssrnet"]) == null ? void 0 : _c3.enabled)
 t10.gender = model14.execute(t10.enhance);
 const data = await t10.gender.data();
 obj.gender = data[0] > data[1] ? "female" : "male";
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
package.json

@@ -78,8 +78,8 @@
 "tensorflow"
 ],
 "devDependencies": {
-"@html-eslint/eslint-plugin": "^0.16.0",
-"@html-eslint/parser": "^0.16.0",
+"@html-eslint/eslint-plugin": "^0.17.1",
+"@html-eslint/parser": "^0.17.1",
 "@microsoft/api-extractor": "^7.34.4",
 "@tensorflow/tfjs-backend-cpu": "^4.2.0",
 "@tensorflow/tfjs-backend-wasm": "^4.2.0",
@@ -91,15 +91,15 @@
 "@tensorflow/tfjs-layers": "^4.2.0",
 "@tensorflow/tfjs-node": "^4.2.0",
 "@tensorflow/tfjs-node-gpu": "^4.2.0",
-"@types/node": "^18.14.2",
+"@types/node": "^18.14.6",
 "@types/offscreencanvas": "^2019.7.0",
-"@typescript-eslint/eslint-plugin": "^5.54.0",
-"@typescript-eslint/parser": "^5.54.0",
+"@typescript-eslint/eslint-plugin": "^5.54.1",
+"@typescript-eslint/parser": "^5.54.1",
 "@vladmandic/build": "0.8.2",
 "@vladmandic/pilogger": "^0.4.7",
 "@vladmandic/tfjs": "github:vladmandic/tfjs",
 "canvas": "^2.11.0",
-"esbuild": "^0.17.10",
+"esbuild": "^0.17.11",
 "eslint": "8.35.0",
 "eslint-config-airbnb-base": "^15.0.0",
 "eslint-plugin-html": "^7.1.0",
@@ -108,9 +108,9 @@
 "eslint-plugin-markdown": "^3.0.0",
 "eslint-plugin-node": "^11.1.0",
 "eslint-plugin-promise": "^6.1.1",
-"rimraf": "^4.1.2",
+"rimraf": "^4.3.1",
 "tslib": "^2.5.0",
-"typedoc": "0.23.26",
+"typedoc": "0.24.0-beta.2",
 "typescript": "4.9.5"
 }
 }
@@ -32,10 +32,17 @@ export async function load(config: Config): Promise<GraphModel> {
   return model;
 }
 
-export function enhance(input): Tensor {
+export function enhance(input, config: Config): Tensor {
   const tensor = (input.image || input.tensor || input) as Tensor4D; // input received from detector is already normalized to 0..1, input is also assumed to be straightened
   if (!model?.inputs[0].shape) return tensor; // model has no shape so no point continuing
-  const crop: Tensor = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+  let crop: Tensor;
+  if (config.face.description?.['crop'] > 0) { // optional crop
+    const cropval = config.face.description?.['crop'];
+    const box = [[cropval, cropval, 1 - cropval, 1 - cropval]];
+    crop = tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
+  } else {
+    crop = tf.image.resizeBilinear(tensor, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+  }
   const norm: Tensor = tf.mul(crop, constants.tf255);
   tf.dispose(crop);
   return norm;
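The box literal `[[cropval, cropval, 1 - cropval, 1 - cropval]]` follows the `tf.image.cropAndResize` convention: each box is `[top, left, bottom, right]` in normalized 0..1 coordinates, so a single fractional value insets all four edges symmetrically. A standalone sketch with plain tfjs and made-up dimensions:

```ts
import * as tf from '@tensorflow/tfjs';

const input = tf.zeros([1, 256, 256, 3]) as tf.Tensor4D; // stand-in for a face tensor
const cropval = 0.1; // keep the middle 80% in each dimension
const box = [[cropval, cropval, 1 - cropval, 1 - cropval]]; // [top, left, bottom, right]
const out = tf.image.cropAndResize(input, box, [0], [224, 224]); // box 0 applied to batch image 0
console.log(out.shape); // [1, 224, 224, 3]
```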
@@ -75,7 +82,7 @@ export async function predict(image: Tensor4D, config: Config, idx: number, coun
   skipped = 0;
   return new Promise(async (resolve) => {
     if (config.face.description?.enabled) {
-      const enhanced = enhance(image);
+      const enhanced = enhance(image, config);
       const resT = model?.execute(enhanced) as Tensor[];
       lastTime = now();
       tf.dispose(enhanced);
@@ -48,9 +48,9 @@ export async function predict(image: Tensor4D, config: Config, idx: number, coun
     if (config.face.emotion?.enabled) {
       const t: Record<string, Tensor> = {};
       const inputSize = model?.inputs[0].shape ? model.inputs[0].shape[2] : 0;
-      if (config.face.emotion['crop'] > 0) { // optional crop
-        const crop = config.face.emotion['crop'];
-        const box = [[crop, crop, 1 - crop, 1 - crop]]; // empirical values for top, left, bottom, right
+      if (config.face.emotion?.['crop'] > 0) { // optional crop
+        const crop = config.face.emotion?.['crop'];
+        const box = [[crop, crop, 1 - crop, 1 - crop]];
         t.resize = tf.image.cropAndResize(image, box, [0], [inputSize, inputSize]);
       } else {
         t.resize = tf.image.resizeBilinear(image, [inputSize, inputSize], false);
@@ -41,7 +41,11 @@ export async function predict(image: Tensor4D, config: Config, idx: number, coun
     if (!model?.inputs[0].shape) return;
     const t: Record<string, Tensor> = {};
     // t.resize = tf.image.resizeBilinear(image, [model?.inputs[0].shape[2], model?.inputs[0].shape[1]], false);
-    const box = [[0.0, 0.10, 0.90, 0.90]]; // empirical values for top, left, bottom, right
+    let box = [[0.0, 0.10, 0.90, 0.90]]; // empirical values for top, left, bottom, right
+    if (config.face.gear?.['crop'] > 0) { // optional crop config value
+      const crop = config.face.gear?.['crop'];
+      box = [[crop, crop, 1 - crop, 1 - crop]];
+    }
     t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
     const obj: GearType = { age: 0, gender: 'unknown', genderScore: 0, race: [] };
     if (config.face.gear?.enabled) [t.age, t.gender, t.race] = model.execute(t.resize, ['age_output', 'gender_output', 'race_output']) as Tensor[];
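Note that gear differs from the other three models: it already cropped with an asymmetric default box, so the new option replaces the box rather than adding a crop step. A hypothetical helper (not in the codebase) capturing the box selection:

```ts
// normalized [top, left, bottom, right] box used before resizing to model input
function gearBox(crop?: number): number[][] {
  if (crop && crop > 0) return [[crop, crop, 1 - crop, 1 - crop]]; // configured symmetric inset
  return [[0.0, 0.10, 0.90, 0.90]]; // default empirical values from the hunk above
}
```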
@@ -37,7 +37,13 @@ export async function predict(image: Tensor4D, config: Config, idx: number, coun
   return new Promise(async (resolve) => {
     if (!model?.inputs || !model.inputs[0] || !model.inputs[0].shape) return;
     const t: Record<string, Tensor> = {};
-    t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+    if (config.face['ssrnet']?.['crop'] > 0) { // optional crop
+      const crop = config.face['ssrnet']?.['crop'];
+      const box = [[crop, crop, 1 - crop, 1 - crop]];
+      t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
+    } else {
+      t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+    }
     t.enhance = tf.mul(t.resize, constants.tf255);
     const obj = { age: 0 };
     if (config.face['ssrnet']?.enabled) t.age = model.execute(t.enhance) as Tensor;
@@ -41,7 +41,13 @@ export async function predict(image: Tensor4D, config: Config, idx, count): Prom
   return new Promise(async (resolve) => {
     if (!model?.inputs[0].shape) return;
     const t: Record<string, Tensor> = {};
-    t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+    if (config.face['ssrnet']?.['crop'] > 0) { // optional crop
+      const crop = config.face['ssrnet']?.['crop'];
+      const box = [[crop, crop, 1 - crop, 1 - crop]];
+      t.resize = tf.image.cropAndResize(image, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
+    } else {
+      t.resize = tf.image.resizeBilinear(image, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false);
+    }
     t.enhance = tf.tidy(() => {
       let normalize: Tensor;
       if (model?.inputs?.[0].shape?.[3] === 1) {
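This hunk ends just as the gender path branches on a single-channel model input (`shape[3] === 1`); the matching bundle hunk above shows that branch splitting RGB and re-weighting each channel before summing. A rough sketch of that idea, with standard luminance coefficients as assumed stand-ins for the module's own `rgb` constants:

```ts
import * as tf from '@tensorflow/tfjs';

// collapse an RGB tensor to a single channel using per-channel weights
function weightedGray(t: tf.Tensor4D, rgb = [0.2989, 0.587, 0.114]): tf.Tensor4D {
  return tf.tidy(() => {
    const [red, green, blue] = tf.split(t, 3, 3); // split along the channel axis
    return tf.addN([tf.mul(red, rgb[0]), tf.mul(green, rgb[1]), tf.mul(blue, rgb[2])]) as tf.Tensor4D;
  });
}
```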
test/build.log

@ -1,50 +1,50 @@
|
|||
2023-02-28 14:59:24 [32mDATA: [39m Build {"name":"@vladmandic/human","version":"3.0.5"}
|
||||
2023-02-28 14:59:24 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"3.0.5"}
|
||||
2023-02-28 14:59:24 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2023-02-28 14:59:24 [36mINFO: [39m Toolchain: {"build":"0.8.2","esbuild":"0.17.10","typescript":"4.9.5","typedoc":"0.23.26","eslint":"8.35.0"}
|
||||
2023-02-28 14:59:24 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":673124,"outputBytes":319556}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":673128,"outputBytes":319560}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":674076,"outputBytes":319671}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":672870,"outputBytes":318124}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1151306}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1823506,"outputBytes":1465356}
|
||||
2023-02-28 14:59:24 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1823506,"outputBytes":1932610}
|
||||
2023-02-28 14:59:28 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types/lib","files":15}
|
||||
2023-02-28 14:59:31 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":81,"generated":true}
|
||||
2023-02-28 14:59:31 [35mSTATE:[39m Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6162,"outputBytes":2901}
|
||||
2023-02-28 14:59:31 [35mSTATE:[39m Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17503,"outputBytes":9403}
|
||||
2023-02-28 14:59:41 [35mSTATE:[39m Lint: {"locations":["**/*.json","src/**/*.ts","test/**/*.js","demo/**/*.js","**/*.md"],"files":170,"errors":0,"warnings":0}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
|
||||
2023-02-28 14:59:42 [36mINFO: [39m Done...
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
|
||||
2023-02-28 14:59:42 [35mSTATE:[39m Filter: {"input":"types/tfjs-core.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m API-Extractor: {"succeeeded":true,"errors":0,"warnings":210}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Filter: {"input":"types/human.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.esm-nobundle.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.esm.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.node-gpu.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.node.d.ts"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Write: {"output":"dist/human.node-wasm.d.ts"}
|
||||
2023-02-28 14:59:43 [36mINFO: [39m Analyze models: {"folders":8,"result":"models/models.json"}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"./models","models":12}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../human-models/models","models":44}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../blazepose/model/","models":4}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../anti-spoofing/model","models":1}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../efficientpose/models","models":3}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../insightface/models","models":5}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../movenet/models","models":3}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models {"folder":"../nanodet/models","models":4}
|
||||
2023-02-28 14:59:43 [35mSTATE:[39m Models: {"count":58,"totalSize":380063249}
|
||||
2023-02-28 14:59:43 [36mINFO: [39m Human Build complete... {"logFile":"test/build.log"}
|
||||
2023-03-06 17:26:10 [32mDATA: [39m Build {"name":"@vladmandic/human","version":"3.0.5"}
|
||||
2023-03-06 17:26:10 [36mINFO: [39m Application: {"name":"@vladmandic/human","version":"3.0.5"}
|
||||
2023-03-06 17:26:10 [36mINFO: [39m Environment: {"profile":"production","config":".build.json","package":"package.json","tsconfig":true,"eslintrc":true,"git":true}
|
||||
2023-03-06 17:26:10 [36mINFO: [39m Toolchain: {"build":"0.8.2","esbuild":"0.17.11","typescript":"4.9.5","typedoc":"0.23.26","eslint":"8.35.0"}
|
||||
2023-03-06 17:26:10 [36mINFO: [39m Build: {"profile":"production","steps":["clean","compile","typings","typedoc","lint","changelog"]}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Clean: {"locations":["dist/*","types/*","typedoc/*"]}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"tfjs/browser/version","format":"esm","platform":"browser","input":"tfjs/tf-version.ts","output":"dist/tfjs.version.js","files":1,"inputBytes":1289,"outputBytes":361}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/cpu","format":"cjs","platform":"node","input":"tfjs/tf-node.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":569,"outputBytes":924}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"human/nodejs/cpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node.js","files":80,"inputBytes":674199,"outputBytes":320338}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/gpu","format":"cjs","platform":"node","input":"tfjs/tf-node-gpu.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":577,"outputBytes":928}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"human/nodejs/gpu","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-gpu.js","files":80,"inputBytes":674203,"outputBytes":320342}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"tfjs/nodejs/wasm","format":"cjs","platform":"node","input":"tfjs/tf-node-wasm.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":665,"outputBytes":1876}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"human/nodejs/wasm","format":"cjs","platform":"node","input":"src/human.ts","output":"dist/human.node-wasm.js","files":80,"inputBytes":675151,"outputBytes":320453}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/nobundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":2,"inputBytes":1375,"outputBytes":670}
|
||||
2023-03-06 17:26:10 [35mSTATE:[39m Compile: {"name":"human/browser/esm/nobundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm-nobundle.js","files":80,"inputBytes":673945,"outputBytes":318902}
|
||||
2023-03-06 17:26:11 [35mSTATE:[39m Compile: {"name":"tfjs/browser/esm/bundle","format":"esm","platform":"browser","input":"tfjs/tf-browser.ts","output":"dist/tfjs.esm.js","files":10,"inputBytes":1375,"outputBytes":1151285}
|
||||
2023-03-06 17:26:11 [35mSTATE:[39m Compile: {"name":"human/browser/iife/bundle","format":"iife","platform":"browser","input":"src/human.ts","output":"dist/human.js","files":80,"inputBytes":1824560,"outputBytes":1466098}
|
||||
2023-03-06 17:26:11 [35mSTATE:[39m Compile: {"name":"human/browser/esm/bundle","format":"esm","platform":"browser","input":"src/human.ts","output":"dist/human.esm.js","files":80,"inputBytes":1824560,"outputBytes":1933980}
|
||||
2023-03-06 17:26:15 [35mSTATE:[39m Typings: {"input":"src/human.ts","output":"types/lib","files":15}
|
||||
2023-03-06 17:26:17 [35mSTATE:[39m TypeDoc: {"input":"src/human.ts","output":"typedoc","objects":81,"generated":true}
|
||||
2023-03-06 17:26:17 [35mSTATE:[39m Compile: {"name":"demo/typescript","format":"esm","platform":"browser","input":"demo/typescript/index.ts","output":"demo/typescript/index.js","files":1,"inputBytes":6308,"outputBytes":3027}
|
||||
2023-03-06 17:26:17 [35mSTATE:[39m Compile: {"name":"demo/faceid","format":"esm","platform":"browser","input":"demo/faceid/index.ts","output":"demo/faceid/index.js","files":2,"inputBytes":17503,"outputBytes":9403}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Lint: {"locations":["**/*.json","src/**/*.ts","test/**/*.js","demo/**/*.js","**/*.md"],"files":170,"errors":0,"warnings":0}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m ChangeLog: {"repository":"https://github.com/vladmandic/human","branch":"main","output":"CHANGELOG.md"}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs-core.d.ts","output":"types/tfjs-core.d.ts"}
|
||||
2023-03-06 17:26:26 [36mINFO: [39m Done...
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Copy: {"input":"node_modules/@vladmandic/tfjs/types/tfjs.d.ts","output":"types/tfjs.esm.d.ts"}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Copy: {"input":"src/types/tsconfig.json","output":"types/tsconfig.json"}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Copy: {"input":"src/types/eslint.json","output":"types/.eslintrc.json"}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Copy: {"input":"src/types/tfjs.esm.d.ts","output":"dist/tfjs.esm.d.ts"}
|
||||
2023-03-06 17:26:26 [35mSTATE:[39m Filter: {"input":"types/tfjs-core.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m API-Extractor: {"succeeeded":true,"errors":0,"warnings":210}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Filter: {"input":"types/human.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.esm-nobundle.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.esm.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.node-gpu.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.node.d.ts"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Write: {"output":"dist/human.node-wasm.d.ts"}
|
||||
2023-03-06 17:26:27 [36mINFO: [39m Analyze models: {"folders":8,"result":"models/models.json"}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"./models","models":12}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../human-models/models","models":44}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../blazepose/model/","models":4}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../anti-spoofing/model","models":1}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../efficientpose/models","models":3}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../insightface/models","models":5}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../movenet/models","models":3}
|
||||
2023-03-06 17:26:27 [35mSTATE:[39m Models {"folder":"../nanodet/models","models":4}
|
||||
2023-03-06 17:26:28 [35mSTATE:[39m Models: {"count":58,"totalSize":380063249}
|
||||
2023-03-06 17:26:28 [36mINFO: [39m Human Build complete... {"logFile":"test/build.log"}
|
||||
test/test.log

File diff suppressed because it is too large
tsconfig.json

@@ -20,7 +20,6 @@
 "experimentalDecorators": true,
 "forceConsistentCasingInFileNames": true,
 "importHelpers": true,
-"importsNotUsedAsValues": "error",
 "isolatedModules": false,
 "noEmitHelpers": true,
 "noEmitOnError": false,