stricter linting, fix face annotations

pull/293/head
Vladimir Mandic 2020-12-27 08:12:22 -05:00
parent 8b52a6b3d9
commit fe0439ec0d
17 changed files with 89 additions and 85 deletions

View File

@@ -252,7 +252,6 @@ function webWorker(input, image, canvas, timestamp) {
// create new webworker and add event handler only once
log('creating worker thread');
worker = new Worker(ui.worker, { type: 'module' });
console.log('worker', worker);
// after receiving message from webworker, parse&draw results and send new frame for processing
worker.addEventListener('message', (msg) => {
if (msg.data.result.performance && msg.data.result.performance.total) ui.detectFPS.push(1000 / msg.data.result.performance.total);
@@ -270,7 +269,7 @@ function webWorker(input, image, canvas, timestamp) {
});
}
// pass image data as arraybuffer to worker by reference to avoid copy
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, 'Human', [image.data.buffer]);
}
// main processing function when input is webcam, can use direct invocation or web worker
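Editor's note on the transfer pattern above: the standard Worker.postMessage signature takes the transfer list directly as its second argument (or as { transfer: [...] } in the options form). A minimal sketch of the zero-copy handoff, with illustrative names:

```js
// minimal sketch: move pixel data to a worker without copying (names illustrative)
const worker = new Worker('./worker.js', { type: 'module' });
const ctx = canvas.getContext('2d');
const pixels = ctx.getImageData(0, 0, canvas.width, canvas.height);
// listing the buffer as transferable detaches it from the main thread after the call
worker.postMessage(
  { image: pixels.data.buffer, width: canvas.width, height: canvas.height },
  [pixels.data.buffer],
);
```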

View File

@@ -1,3 +1,4 @@
// @ts-nocheck
/* eslint-disable max-len */
// based on: https://github.com/munrocket/gl-bench

View File

@@ -1,3 +1,5 @@
// @ts-nocheck
let instance = 0;
let CSScreated = false;

View File

@@ -24,6 +24,6 @@ onmessage = async (msg) => {
}
// must strip canvas from return value as it cannot be transferred from worker thread
if (result.canvas) result.canvas = null;
postMessage({ result });
postMessage({ result }, 'Human');
busy = false;
};
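The canvas is stripped because an HTMLCanvasElement is neither structured-cloneable nor transferable. For context, a hedged sketch of the receiving side, rebuilding an ImageData from the transferred buffer (the human instance and field names are assumptions taken from the surrounding demo code):

```js
// worker-side sketch: reconstruct ImageData from the transferred ArrayBuffer
onmessage = async (msg) => {
  const { image, width, height } = msg.data;
  const imageData = new ImageData(new Uint8ClampedArray(image), width, height);
  const result = await human.detect(imageData); // `human` assumed created in this worker
  if (result.canvas) result.canvas = null; // canvases cannot cross the thread boundary
  postMessage({ result });
};
```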

View File

@@ -46,6 +46,7 @@ async function predict(image, config) {
const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
ageT = profileAge.result.clone();
profileAge.result.dispose();
// @ts-ignore
profile.run('age', profileAge);
}
enhance.dispose();
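For reference on the profiling branch: tf.profile runs the supplied function and resolves with kernel-level stats plus the function's return value under result, which is why the code clones result before disposing it. A minimal sketch:

```js
// minimal tf.profile sketch: info.result is the profiled function's return value
const info = await tf.profile(() => models.age.predict(enhance));
const prediction = info.result.clone(); // keep a usable copy before disposal
info.result.dispose();
// info.peakBytes, info.newTensors, and info.kernels describe the profiled run
```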

View File

@@ -65,6 +65,7 @@ async function predict(image, config) {
const profileData = await tf.profile(() => models.emotion.predict(normalize));
data = profileData.result.dataSync();
profileData.result.dispose();
// @ts-ignore
profile.run('emotion', profileData);
}
for (let i = 0; i < data.length; i++) {

View File

@@ -53,6 +53,7 @@ async function predict(image, config) {
const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
genderT = profileGender.result.clone();
profileGender.result.dispose();
// @ts-ignore
profile.run('gender', profileGender);
}
enhance.dispose();

View File

@@ -1,19 +1,3 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '../../dist/tfjs.esm.js';
function getBoxSize(box) {

View File

@@ -1,20 +1,3 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';

View File

@@ -1,20 +1,3 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
import * as tf from '../../dist/tfjs.esm.js';
import * as box from './box';
import * as util from './util';

View File

@@ -1,19 +1,3 @@
/**
* @license
* Copyright 2020 Google LLC. All Rights Reserved.
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* =============================================================================
*/
// https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
import { log } from '../log.js';
@@ -74,7 +58,9 @@ async function load(config) {
config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
]);
// @ts-ignore
const handDetector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
// @ts-ignore
const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, config.hand.inputSize);
const handPose = new HandPose(handPipeline);
if (config.hand.enabled) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);

View File

@@ -18,6 +18,7 @@ import * as app from '../package.json';
// helper function: gets elapsed time on both browser and nodejs
const now = () => {
if (typeof performance !== 'undefined') return performance.now();
// @ts-ignore
return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
};
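The @ts-ignore is needed because process is absent from the browser typings; process.hrtime.bigint() returns nanoseconds as a BigInt, hence the division down to milliseconds. An equivalent sketch that avoids the parseInt round-trip:

```js
// node-only sketch: hrtime.bigint() is nanoseconds; 1,000,000 ns = 1 ms
const ms = Number(process.hrtime.bigint() / 1000000n);
```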
@@ -72,6 +73,7 @@ class Human {
}
profile() {
// @ts-ignore
if (this.config.profile) return profile.data;
return {};
}
@@ -102,6 +104,7 @@ class Human {
}
simmilarity(embedding1, embedding2) {
// @ts-ignore
if (this.config.face.embedding.enabled) return embedding.simmilarity(embedding1, embedding2);
return 0;
}
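For illustration only, since the actual metric lives in the embedding module and may differ: a generic cosine-similarity sketch over two embedding arrays such as the ones this method receives.

```js
// illustrative cosine similarity; NOT necessarily what embedding.simmilarity computes
function cosineSimilarity(a, b) {
  let dot = 0;
  let normA = 0;
  let normB = 0;
  for (let i = 0; i < Math.min(a.length, b.length); i++) {
    dot += a[i] * b[i];
    normA += a[i] * a[i];
    normB += b[i] * b[i];
  }
  return dot / ((Math.sqrt(normA) * Math.sqrt(normB)) || 1); // 1 means identical direction
}
```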
@@ -132,21 +135,35 @@ class Human {
this.models.posenet,
this.models.handpose,
] = await Promise.all([
// @ts-ignore
this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
// @ts-ignore
this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
// @ts-ignore
this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
// @ts-ignore
this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
// @ts-ignore
this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
// @ts-ignore
this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
// @ts-ignore
this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
]);
} else {
// @ts-ignore
if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
// @ts-ignore
if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
// @ts-ignore
if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
// @ts-ignore
if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
}
const current = Math.trunc(now() - timeStamp);
@@ -213,7 +230,8 @@ class Human {
const faceRes = [];
this.state = 'run:face';
timeStamp = now();
const faces = await this.models.facemesh.estimateFaces(input, this.config);
// @ts-ignore
const faces = await this.models.facemesh?.estimateFaces(input, this.config);
this.perf.face = Math.trunc(now() - timeStamp);
for (const face of faces) {
this.analyze('Get Face');
@@ -227,10 +245,12 @@ class Human {
// run age, inherits face from blazeface
this.analyze('Start Age:');
if (this.config.async) {
// @ts-ignore
ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
} else {
this.state = 'run:age';
timeStamp = now();
// @ts-ignore
ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
this.perf.age = Math.trunc(now() - timeStamp);
}
@@ -238,10 +258,12 @@ class Human {
// run gender, inherits face from blazeface
this.analyze('Start Gender:');
if (this.config.async) {
// @ts-ignore
genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
} else {
this.state = 'run:gender';
timeStamp = now();
// @ts-ignore
genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
this.perf.gender = Math.trunc(now() - timeStamp);
}
@@ -249,10 +271,12 @@ class Human {
// run emotion, inherits face from blazeface
this.analyze('Start Emotion:');
if (this.config.async) {
// @ts-ignore
emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
} else {
this.state = 'run:emotion';
timeStamp = now();
// @ts-ignore
emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
this.perf.emotion = Math.trunc(now() - timeStamp);
}
@@ -261,10 +285,12 @@ class Human {
// run embedding, inherits face from blazeface
this.analyze('Start Embedding:');
if (this.config.async) {
// @ts-ignore
embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face.image, this.config) : {};
} else {
this.state = 'run:embedding';
timeStamp = now();
// @ts-ignore
embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face.image, this.config) : {};
this.perf.embedding = Math.trunc(now() - timeStamp);
}
@@ -291,7 +317,6 @@ class Human {
confidence: face.confidence,
box: face.box,
mesh: face.mesh,
// AT: boxRaw, meshRaw
boxRaw: face.boxRaw,
meshRaw: face.meshRaw,
annotations: face.annotations,
@@ -317,6 +342,7 @@ class Human {
async image(input, userConfig = {}) {
this.state = 'image';
this.config = mergeDeep(this.config, userConfig);
// @ts-ignore
const process = image.process(input, this.config);
process.tensor.dispose();
return process.canvas;
@@ -356,6 +382,7 @@ class Human {
this.analyze('Start Scope:');
timeStamp = now();
// @ts-ignore
const process = image.process(input, this.config);
if (!process || !process.tensor) {
log('could not convert input to tensor');
@@ -379,12 +406,14 @@ class Human {
// run posenet
this.analyze('Start Body:');
if (this.config.async) {
poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];
// @ts-ignore
poseRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
if (this.perf.body) delete this.perf.body;
} else {
this.state = 'run:body';
timeStamp = now();
poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];
// @ts-ignore
poseRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
this.perf.body = Math.trunc(now() - timeStamp);
}
this.analyze('End Body:');
@@ -392,12 +421,14 @@ class Human {
// run handpose
this.analyze('Start Hand:');
if (this.config.async) {
handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config) : [];
// @ts-ignore
handRes = this.config.hand.enabled ? this.models.handpose?.estimateHands(process.tensor, this.config) : [];
if (this.perf.hand) delete this.perf.hand;
} else {
this.state = 'run:hand';
timeStamp = now();
handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config) : [];
// @ts-ignore
handRes = this.config.hand.enabled ? await this.models.handpose?.estimateHands(process.tensor, this.config) : [];
this.perf.hand = Math.trunc(now() - timeStamp);
}
// this.analyze('End Hand:');

View File

@@ -41,6 +41,7 @@ function process(input, config) {
if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
}
if (!this.fx) return inCanvas;
this.fx.reset();
this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);
@@ -58,8 +59,8 @@ function process(input, config) {
if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
this.fx.apply(inCanvas);
// read pixel data
// const gl = outCanvas.getContext('webgl');
const gl = false;
/*
const gl = outCanvas.getContext('webgl');
if (gl) {
const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
@@ -77,6 +78,7 @@ function process(input, config) {
}
outCanvas.data = pixBuffer;
}
*/
} else {
outCanvas = inCanvas;
}
@@ -93,8 +95,8 @@ function process(input, config) {
tempCanvas.width = targetWidth;
tempCanvas.height = targetHeight;
const tempCtx = tempCanvas.getContext('2d');
tempCtx.drawImage(outCanvas, 0, 0);
const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
tempCtx?.drawImage(outCanvas, 0, 0);
const data = tempCtx?.getImageData(0, 0, targetWidth, targetHeight);
pixels = tf.browser.fromPixels(data);
}
const casted = pixels.toFloat();
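A condensed sketch of the canvas-to-tensor step this hunk guards with optional chaining, using tf.tidy so the intermediate int32 tensor is not leaked (sourceCanvas is illustrative):

```js
// sketch: canvas → float32 tensor without leaking intermediates
const batched = tf.tidy(() => {
  const pixels = tf.browser.fromPixels(sourceCanvas); // int32, shape [height, width, 3]
  return pixels.toFloat().expandDims(0);              // float32, shape [1, height, width, 3]
});
```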

View File

@@ -20,6 +20,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
// @ts-ignore
throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
}
return shader;
@@ -37,6 +38,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
// @ts-ignore
throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
}
@@ -149,9 +151,8 @@ const WebGLImageFilter = function (params) {
};
const _getTempFramebuffer = function (index) {
_tempFramebuffers[index] = _tempFramebuffers[index]
|| _createFramebufferTexture(_width, _height);
// @ts-ignore
_tempFramebuffers[index] = _tempFramebuffers[index] || _createFramebufferTexture(_width, _height);
return _tempFramebuffers[index];
};
@@ -190,7 +191,8 @@ const WebGLImageFilter = function (params) {
source = _sourceTexture;
} else {
// All following draw calls use the temp buffer last drawn to
source = _getTempFramebuffer(_currentFramebufferIndex).texture;
// @ts-ignore
source = _getTempFramebuffer(_currentFramebufferIndex)?.texture;
}
_drawCount++;
@@ -203,7 +205,8 @@ const WebGLImageFilter = function (params) {
} else {
// Intermediate draw call - get a temp buffer to draw to
_currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
target = _getTempFramebuffer(_currentFramebufferIndex).fbo;
// @ts-ignore
target = _getTempFramebuffer(_currentFramebufferIndex)?.fbo;
}
// Bind the source and target and draw the two triangles
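The @ts-ignore on the throw sites above is needed because Error() accepts only a single message string (a second parameter exists only as the ES2022 { cause } options object). A conventional alternative folds the info log into the message:

```js
// sketch: embed the WebGL info log in the error message instead of a second argument
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
  throw new Error(`Filter: GL compile failed: ${gl.getShaderInfoLog(shader)}`);
}
```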

View File

@@ -20,7 +20,9 @@ export const config = {
export function register() {
if (!tf.findBackend(config.name)) {
// @ts-ignore
config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
// @ts-ignore
const gl = config.canvas.getContext('webgl2', config.webGLattr);
tf.setWebGLContext(2, gl);
const ctx = new tf.GPGPUContext(gl);
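A hedged usage sketch for this custom backend, assuming the register() and config exports shown above; tf.setBackend and tf.ready are the standard tfjs activation calls, and the module path is an assumption:

```js
// illustrative: register the custom backend, then make it active
import * as customBackend from './tfjs/backend.js'; // path is an assumption
customBackend.register();
await tf.setBackend(customBackend.config.name);
await tf.ready();
```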

tsconfig.json (new file, 25 additions)
View File

@@ -0,0 +1,25 @@
{
"compilerOptions": {
"module": "es2020",
"target": "es2018",
"moduleResolution": "node",
"lib": ["es2018", "dom"],
"typeRoots": ["node_modules/@types"],
"outDir": "types",
"declaration": true,
"emitDeclarationOnly": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"importHelpers": true,
"noImplicitAny": false,
"preserveConstEnums": true,
"removeComments": false,
"resolveJsonModule": true,
"skipLibCheck": true,
"sourceMap": false,
"strictNullChecks": false,
"allowJs": true
},
"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
"include": ["src/*", "demo/*"],
}
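With declaration and emitDeclarationOnly both enabled, running npx tsc -p tsconfig.json type-checks src/ and demo/ and writes only .d.ts files into types/, producing no JavaScript output; transpilation and bundling are presumably left to the project's existing build pipeline.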

wiki (submodule, 2 changed lines)

@@ -1 +1 @@
Subproject commit 699af2235b315ef24766839ddc49a198f7cc21c3
Subproject commit 43425df86424e9b5f4bec3510f3565963255d4a9