diff --git a/demo/gl-bench.js b/demo/gl-bench.js
index 66ceed95..72ffffcb 100644
--- a/demo/gl-bench.js
+++ b/demo/gl-bench.js
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /* eslint-disable max-len */
 
 // based on: https://github.com/munrocket/gl-bench
diff --git a/demo/menu.js b/demo/menu.js
index fa16af5e..3eeac1f6 100644
--- a/demo/menu.js
+++ b/demo/menu.js
@@ -1,5 +1,3 @@
-// @ts-nocheck
-
 let instance = 0;
 let CSScreated = false;
 
diff --git a/package.json b/package.json
index 95ae6f87..a330369a 100644
--- a/package.json
+++ b/package.json
@@ -46,7 +46,7 @@
     "seedrandom": "^3.0.5",
     "simple-git": "^2.34.2",
     "tslib": "^2.1.0",
-    "typescript": "^4.1.3"
+    "typescript": "^4.2.0-dev.20210208"
   },
   "scripts": {
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
diff --git a/src/age/age.ts b/src/age/age.ts
index 1428f00d..3b8deac9 100644
--- a/src/age/age.ts
+++ b/src/age/age.ts
@@ -2,20 +2,20 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
-const models = { age: null };
+let model;
 let last = { age: 0 };
 let skipped = Number.MAX_SAFE_INTEGER;
 
 export async function load(config) {
-  if (!models.age) {
-    models.age = await tf.loadGraphModel(config.face.age.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.age.modelPath);
     log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.age;
+  return model;
}
 
 export async function predict(image, config) {
-  if (!models.age) return null;
+  if (!model) return null;
   if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
     skipped++;
     return last;
@@ -38,27 +38,25 @@ export async function predict(image, config) {
     tf.dispose(resize);
 
     let ageT;
-    const obj = { age: undefined };
+    const obj = { age: 0 };
 
-    if (models.age) {
-      if (!config.profile) {
-        if (config.face.age.enabled) ageT = await models.age.predict(enhance);
-      } else {
-        const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
-        ageT = profileAge.result.clone();
-        profileAge.result.dispose();
-        profile.run('age', profileAge);
-      }
-      enhance.dispose();
-
-      if (ageT) {
-        const data = ageT.dataSync();
-        obj.age = Math.trunc(10 * data[0]) / 10;
-      }
-      ageT.dispose();
-
-      last = obj;
+    if (!config.profile) {
+      if (config.face.age.enabled) ageT = await model.predict(enhance);
+    } else {
+      const profileAge = config.face.age.enabled ? await tf.profile(() => model.predict(enhance)) : {};
+      ageT = profileAge.result.clone();
+      profileAge.result.dispose();
+      profile.run('age', profileAge);
     }
+    enhance.dispose();
+
+    if (ageT) {
+      const data = ageT.dataSync();
+      obj.age = Math.trunc(10 * data[0]) / 10;
+    }
+    ageT.dispose();
+
+    last = obj;
     resolve(obj);
   });
 }
diff --git a/src/emotion/emotion.ts b/src/emotion/emotion.ts
index 4bdd01da..99cc9968 100644
--- a/src/emotion/emotion.ts
+++ b/src/emotion/emotion.ts
@@ -3,8 +3,8 @@ import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
-const models = { emotion: null };
-let last = [];
+let model;
+let last: Array<{ score: number, emotion: string }> = [];
 let skipped = Number.MAX_SAFE_INTEGER;
 
 // tuning values
@@ -12,15 +12,15 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 const scale = 1; // score multiplication factor
 
 export async function load(config) {
-  if (!models.emotion) {
-    models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.emotion.modelPath);
     log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.emotion;
+  return model;
 }
 
 export async function predict(image, config) {
-  if (!models.emotion) return null;
+  if (!model) return null;
   if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
     skipped++;
     return last;
@@ -54,15 +54,15 @@
     blueNorm.dispose();
     const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
     grayscale.dispose();
-    const obj = [];
+    const obj: Array<{ score: number, emotion: string }> = [];
     if (config.face.emotion.enabled) {
       let data;
       if (!config.profile) {
-        const emotionT = await models.emotion.predict(normalize);
+        const emotionT = await model.predict(normalize);
         data = emotionT.dataSync();
         tf.dispose(emotionT);
       } else {
-        const profileData = await tf.profile(() => models.emotion.predict(normalize));
+        const profileData = await tf.profile(() => model.predict(normalize));
         data = profileData.result.dataSync();
         profileData.result.dispose();
         profile.run('emotion', profileData);
diff --git a/src/gender/gender.ts b/src/gender/gender.ts
index ca0282c0..d905bf04 100644
--- a/src/gender/gender.ts
+++ b/src/gender/gender.ts
@@ -2,7 +2,7 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';
 
-const models = { gender: null };
+let model;
 let last = { gender: '' };
 let skipped = Number.MAX_SAFE_INTEGER;
 let alternative = false;
@@ -11,16 +11,16 @@ let alternative = false;
 // tuning values
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale
 export async function load(config) {
-  if (!models.gender) {
-    models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
-    alternative = models.gender.inputs[0].shape[3] === 1;
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.gender.modelPath);
+    alternative = model.inputs[0].shape[3] === 1;
     log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.gender;
+  return model;
 }
 
 export async function predict(image, config) {
-  if (!models.gender) return null;
+  if (!model) return null;
   if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
     skipped++;
     return last;
@@ -45,12 +45,12 @@ export async function predict(image, config) {
     tf.dispose(resize);
 
     let genderT;
-    const obj = { gender: undefined, confidence: undefined };
+    const obj = { gender: '', confidence: 0 };
 
     if (!config.profile) {
-      if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
+      if (config.face.gender.enabled) genderT = await model.predict(enhance);
     } else {
-      const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
+      const profileGender = config.face.gender.enabled ? await tf.profile(() => model.predict(enhance)) : {};
       genderT = profileGender.result.clone();
       profileGender.result.dispose();
       profile.run('gender', profileGender);
diff --git a/src/gesture/gesture.ts b/src/gesture/gesture.ts
index 617b8154..a966158a 100644
--- a/src/gesture/gesture.ts
+++ b/src/gesture/gesture.ts
@@ -1,6 +1,6 @@
 export const body = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ body: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
@@ -20,7 +20,7 @@ export const body = (res) => {
 
 export const face = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ face: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
       const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
@@ -41,7 +41,7 @@ export const face = (res) => {
 
 export const iris = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ iris: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.rightEyeIris) continue;
     const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];
@@ -60,10 +60,11 @@ export const iris = (res) => {
 
 export const hand = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ hand: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers = [];
+    const fingers: Array<{ name: string, position: number }> = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
+      // @ts-ignore
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
     if (fingers && fingers.length > 0) {
diff --git a/src/handpose/handdetector.ts b/src/handpose/handdetector.ts
index 2f443237..043d945b 100644
--- a/src/handpose/handdetector.ts
+++ b/src/handpose/handdetector.ts
@@ -49,7 +49,7 @@ export class HandDetector {
     scoresT.dispose();
     filteredT.dispose();
 
-    const hands = [];
+    const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
     for (const index of filtered) {
       if (scores[index] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
@@ -70,7 +70,7 @@ export class HandDetector {
     const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
-    const hands = [];
+    const hands: Array<{}> = [];
     if (!predictions || predictions.length === 0) return hands;
     for (const prediction of predictions) {
       const boxes = prediction.box.dataSync();
diff --git a/src/handpose/handpipeline.ts b/src/handpose/handpipeline.ts
index 0e1ed088..be2c995b 100644
--- a/src/handpose/handpipeline.ts
+++ b/src/handpose/handpipeline.ts
@@ -91,7 +91,7 @@ export class HandPipeline {
       // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
-    const hands = [];
+    const hands: Array<{}> = [];
     // log('hand', `skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);
 
     // go through working set of boxes
@@ -124,10 +124,7 @@
         const result = {
           landmarks: coords,
           confidence,
-          box: {
-            topLeft: nextBoundingBox.startPoint,
-            bottomRight: nextBoundingBox.endPoint,
-          },
+          box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },
         };
         hands.push(result);
       } else {
@@ -139,10 +136,7 @@
         const enlarged = box.enlargeBox(box.squarifyBox(currentBox), HAND_BOX_ENLARGE_FACTOR);
         const result = {
           confidence: currentBox.confidence,
-          box: {
-            topLeft: enlarged.startPoint,
-            bottomRight: enlarged.endPoint,
-          },
+          box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
         };
         hands.push(result);
       }
diff --git a/src/handpose/handpose.ts b/src/handpose/handpose.ts
index de9dd60d..3f7a475b 100644
--- a/src/handpose/handpose.ts
+++ b/src/handpose/handpose.ts
@@ -29,7 +29,7 @@
   async estimateHands(input, config) {
     const predictions = await this.handPipeline.estimateHands(input, config);
     if (!predictions) return [];
-    const hands = [];
+    const hands: Array<{ confidence: number, box: any, landmarks: any, annotations: any }> = [];
     for (const prediction of predictions) {
       const annotations = {};
       if (prediction.landmarks) {
@@ -43,12 +43,7 @@ export class HandPose {
         Math.min(input.shape[2], prediction.box.bottomRight[0]) - prediction.box.topLeft[0],
         Math.min(input.shape[1], prediction.box.bottomRight[1]) - prediction.box.topLeft[1],
       ] : 0;
-      hands.push({
-        confidence: prediction.confidence,
-        box,
-        landmarks: prediction.landmarks,
-        annotations,
-      });
+      hands.push({ confidence: prediction.confidence, box, landmarks: prediction.landmarks, annotations });
     }
     return hands;
   }
diff --git a/src/human.ts b/src/human.ts
index 1816595d..301fe3f1 100644
--- a/src/human.ts
+++ b/src/human.ts
@@ -236,7 +236,7 @@ class Human {
     let genderRes;
     let emotionRes;
     let embeddingRes;
-    const faceRes = [];
+    const faceRes: Array<{ confidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
     this.state = 'run:face';
     timeStamp = now();
     const faces = await this.models.face?.estimateFaces(input, this.config);
@@ -349,8 +349,8 @@ class Human {
     this.state = 'image';
     this.config = mergeDeep(this.config, userConfig);
     const process = image.process(input, this.config);
-    process.tensor.dispose();
-    return process.canvas;
+    process?.tensor?.dispose();
+    return process?.canvas;
   }
 
   // main detect function
@@ -445,6 +445,7 @@ class Human {
     let gestureRes = [];
     if (this.config.gesture.enabled) {
       timeStamp = now();
+      // @ts-ignore
       gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
       if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
       else if (this.perf.gesture) delete this.perf.gesture;
@@ -495,8 +496,8 @@ class Human {
       canvas.width = size;
       canvas.height = size;
       const ctx = canvas.getContext('2d');
-      ctx.drawImage(img, 0, 0);
-      const data = ctx.getImageData(0, 0, size, size);
+      ctx?.drawImage(img, 0, 0);
+      const data = ctx?.getImageData(0, 0, size, size);
       this.detect(data, this.config).then((res) => resolve(res));
     };
     if (src) img.src = src;
diff --git a/src/image.ts b/src/image.ts
index b9cfe7bc..bb5e11bb 100644
--- a/src/image.ts
+++ b/src/image.ts
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 import { log } from './log';
 import * as tf from '../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
diff --git a/src/tfjs/backend.ts b/src/tfjs/backend.ts
index 293cd99f..799394cf 100644
--- a/src/tfjs/backend.ts
+++ b/src/tfjs/backend.ts
@@ -24,12 +24,14 @@ export function register() {
   if (!tf.findBackend(config.name)) {
     log('backend registration:', config.name);
     try {
+      // @ts-ignore
       config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
     } catch (err) {
       log('error: cannot create canvas:', err);
       return;
     }
     try {
+      // @ts-ignore
       config.gl = config.canvas.getContext('webgl2', config.webGLattr);
     } catch (err) {
       log('error: cannot get WebGL2 context:', err);
@@ -60,6 +62,7 @@
     }
     try {
       tf.ENV.set('WEBGL_VERSION', 2);
+      // @ts-ignore
       tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
       tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
       tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
diff --git a/tsconfig.json b/tsconfig.json
index d32914d3..23f737ea 100644
--- a/tsconfig.json
+++ b/tsconfig.json
@@ -17,9 +17,9 @@
     "resolveJsonModule": true,
     "skipLibCheck": true,
     "sourceMap": false,
-    "strictNullChecks": false,
+    "strictNullChecks": true,
     "allowJs": true
   },
   "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
-  "include": ["src/*", "demo/*"],
+  "include": ["src/*", "src/***/*", "demo/*"],
 }
diff --git a/wiki b/wiki
index d8804390..d365c4e4 160000
--- a/wiki
+++ b/wiki
@@ -1 +1 @@
-Subproject commit d88043901df1d671a0897275bae8d0f537b84ff5
+Subproject commit d365c4e487508181811ea7ff1a26d682ef6896b7
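
Note: the snippet below is not part of the commit above. It is a minimal standalone TypeScript sketch of the recurring pattern this changeset applies once tsconfig's "strictNullChecks" is flipped to true: a possibly-undefined module-level model guarded at every call site, explicitly typed empty arrays, concrete defaults instead of undefined fields, and optional chaining around nullable DOM results. All names here (Model, load, predict, results) are hypothetical stand-ins, not APIs from this repository.

// Illustration only; hypothetical names throughout, not part of the commit.
type Model = { predict: (input: number[]) => number[] };

// Under strictNullChecks an uninitialized `let` has type `Model | undefined`,
// so every use must be guarded, which is the shape age.ts/gender.ts/emotion.ts adopt.
let model: Model | undefined;

async function load(): Promise<Model> {
  // placeholder loader standing in for the real model loading step
  if (!model) model = { predict: (input) => input.map((v) => v / 255) };
  return model;
}

async function predict(input: number[]): Promise<Array<{ score: number, label: string }>> {
  if (!model) return []; // guard: model may still be undefined
  // A bare `[]` can be inferred as `never[]` once strictNullChecks is on,
  // so every push() fails to type-check; hence the explicit Array<{...}>
  // annotations added throughout gesture.ts and the hand/face pipelines.
  const results: Array<{ score: number, label: string }> = [];
  for (const value of model.predict(input)) results.push({ score: value, label: 'demo' });
  return results;
}

// DOM lookups are nullable under strictNullChecks, hence ctx?.drawImage() in human.ts.
const canvas = typeof document !== 'undefined' ? document.createElement('canvas') : undefined;
const ctx = canvas?.getContext('2d');
ctx?.fillRect(0, 0, 1, 1);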