mirror of https://github.com/vladmandic/human
updated typings
parent bfe688251b
commit 4130ddb32f
@@ -1,4 +1,3 @@
-// @ts-nocheck
 /* eslint-disable max-len */

 // based on: https://github.com/munrocket/gl-bench
@@ -1,5 +1,3 @@
-// @ts-nocheck
-
 let instance = 0;
 let CSScreated = false;
@@ -46,7 +46,7 @@
     "seedrandom": "^3.0.5",
     "simple-git": "^2.34.2",
     "tslib": "^2.1.0",
-    "typescript": "^4.1.3"
+    "typescript": "^4.2.0-dev.20210208"
   },
   "scripts": {
     "start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
@@ -2,20 +2,20 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';

-const models = { age: null };
+let model;
 let last = { age: 0 };
 let skipped = Number.MAX_SAFE_INTEGER;

 export async function load(config) {
-  if (!models.age) {
-    models.age = await tf.loadGraphModel(config.face.age.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.age.modelPath);
     log(`load model: ${config.face.age.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.age;
+  return model;
 }

 export async function predict(image, config) {
-  if (!models.age) return null;
+  if (!model) return null;
   if ((skipped < config.face.age.skipFrames) && config.videoOptimized && last.age && (last.age > 0)) {
     skipped++;
     return last;
@@ -38,27 +38,25 @@ export async function predict(image, config) {
     tf.dispose(resize);

     let ageT;
-    const obj = { age: undefined };
+    const obj = { age: 0 };

-    if (models.age) {
-      if (!config.profile) {
-        if (config.face.age.enabled) ageT = await models.age.predict(enhance);
-      } else {
-        const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
-        ageT = profileAge.result.clone();
-        profileAge.result.dispose();
-        profile.run('age', profileAge);
-      }
-      enhance.dispose();
-
-      if (ageT) {
-        const data = ageT.dataSync();
-        obj.age = Math.trunc(10 * data[0]) / 10;
-      }
-      ageT.dispose();
-
-      last = obj;
+    if (!config.profile) {
+      if (config.face.age.enabled) ageT = await model.predict(enhance);
+    } else {
+      const profileAge = config.face.age.enabled ? await tf.profile(() => model.predict(enhance)) : {};
+      ageT = profileAge.result.clone();
+      profileAge.result.dispose();
+      profile.run('age', profileAge);
     }
+    enhance.dispose();
+
+    if (ageT) {
+      const data = ageT.dataSync();
+      obj.age = Math.trunc(10 * data[0]) / 10;
+    }
+    ageT.dispose();
+
+    last = obj;
     resolve(obj);
   });
 }
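The age module (and, below, the gender and emotion modules) replaces the `const models = { age: null }` container with a single module-scoped `model` variable. A minimal TypeScript sketch of that load-once pattern with the frame-skip cache, using placeholder types and a stub loader instead of the real tfjs imports:

// Sketch of the load-once module pattern used by the age/gender/emotion modules.
// `GraphModel` and `loadGraphModel` are stand-ins for the tfjs API; the config shape is simplified.
interface GraphModel { predict: (input: unknown) => unknown }

async function loadGraphModel(modelPath: string): Promise<GraphModel> {
  // placeholder loader; the real code calls tf.loadGraphModel(modelPath)
  return { predict: (input: unknown) => ({ modelPath, input }) };
}

let model: GraphModel | undefined;      // module-scoped singleton instead of a `models` map
let last = { age: 0 };                  // cached result reused between video frames
let skipped = Number.MAX_SAFE_INTEGER;  // forces a fresh run on the first call

export async function load(config: { modelPath: string }): Promise<GraphModel> {
  if (!model) model = await loadGraphModel(config.modelPath);  // load only once
  return model;
}

export async function predict(input: unknown, config: { skipFrames: number, videoOptimized: boolean }) {
  if (!model) return null;                                     // caller is expected to load() first
  if (config.videoOptimized && (skipped < config.skipFrames) && last.age > 0) {
    skipped++;                                                 // serve the cached result
    return last;
  }
  skipped = 0;
  // ...run model.predict(input) here and update `last` with the fresh result...
  return last;
}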
@@ -3,8 +3,8 @@ import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';

 const annotations = ['angry', 'disgust', 'fear', 'happy', 'sad', 'surprise', 'neutral'];
-const models = { emotion: null };
-let last = [];
+let model;
+let last: Array<{ score: number, emotion: string }> = [];
 let skipped = Number.MAX_SAFE_INTEGER;

 // tuning values
@@ -12,15 +12,15 @@ const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when
 const scale = 1; // score multiplication factor

 export async function load(config) {
-  if (!models.emotion) {
-    models.emotion = await tf.loadGraphModel(config.face.emotion.modelPath);
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.emotion.modelPath);
     log(`load model: ${config.face.emotion.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.emotion;
+  return model;
 }

 export async function predict(image, config) {
-  if (!models.emotion) return null;
+  if (!model) return null;
   if ((skipped < config.face.emotion.skipFrames) && config.videoOptimized && (last.length > 0)) {
     skipped++;
     return last;
@@ -54,15 +54,15 @@ export async function predict(image, config) {
     blueNorm.dispose();
     const normalize = tf.tidy(() => grayscale.sub(0.5).mul(2));
     grayscale.dispose();
-    const obj = [];
+    const obj: Array<{ score: number, emotion: string }> = [];
     if (config.face.emotion.enabled) {
       let data;
       if (!config.profile) {
-        const emotionT = await models.emotion.predict(normalize);
+        const emotionT = await model.predict(normalize);
         data = emotionT.dataSync();
         tf.dispose(emotionT);
       } else {
-        const profileData = await tf.profile(() => models.emotion.predict(normalize));
+        const profileData = await tf.profile(() => model.predict(normalize));
         data = profileData.result.dataSync();
         profileData.result.dispose();
         profile.run('emotion', profileData);
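Several of these changes annotate arrays that start out empty (`last`, `obj`, `gestures`, `hands`) with an explicit element type. A small illustrative sketch of what that annotation buys under the stricter compiler settings enabled later in this commit (the values are made up):

// With an explicit element type, mis-shaped pushes are rejected at compile time.
const results: Array<{ score: number, emotion: string }> = [];

results.push({ score: 0.92, emotion: 'happy' });   // ok
// results.push({ score: 0.92 });                  // error: property 'emotion' is missing
// results.push({ value: 1, label: 'sad' });       // error: unknown properties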
@@ -2,7 +2,7 @@ import { log } from '../log';
 import * as tf from '../../dist/tfjs.esm.js';
 import * as profile from '../profile.js';

-const models = { gender: null };
+let model;
 let last = { gender: '' };
 let skipped = Number.MAX_SAFE_INTEGER;
 let alternative = false;
@@ -11,16 +11,16 @@ let alternative = false;
 const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue colors when converting to grayscale

 export async function load(config) {
-  if (!models.gender) {
-    models.gender = await tf.loadGraphModel(config.face.gender.modelPath);
-    alternative = models.gender.inputs[0].shape[3] === 1;
+  if (!model) {
+    model = await tf.loadGraphModel(config.face.gender.modelPath);
+    alternative = model.inputs[0].shape[3] === 1;
     log(`load model: ${config.face.gender.modelPath.match(/\/(.*)\./)[1]}`);
   }
-  return models.gender;
+  return model;
 }

 export async function predict(image, config) {
-  if (!models.gender) return null;
+  if (!model) return null;
   if ((skipped < config.face.gender.skipFrames) && config.videoOptimized && last.gender !== '') {
     skipped++;
     return last;
@@ -45,12 +45,12 @@ export async function predict(image, config) {
     tf.dispose(resize);

     let genderT;
-    const obj = { gender: undefined, confidence: undefined };
+    const obj = { gender: '', confidence: 0 };

     if (!config.profile) {
-      if (config.face.gender.enabled) genderT = await models.gender.predict(enhance);
+      if (config.face.gender.enabled) genderT = await model.predict(enhance);
     } else {
-      const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
+      const profileGender = config.face.gender.enabled ? await tf.profile(() => model.predict(enhance)) : {};
       genderT = profileGender.result.clone();
       profileGender.result.dispose();
       profile.run('gender', profileGender);
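In the gender module the result object is now seeded with typed defaults (`{ gender: '', confidence: 0 }`) instead of `undefined` placeholders. A hedged sketch of why that matters once `strictNullChecks` is on (the interface name is invented for the example):

// Defaults keep the object shape stable, so consumers need no undefined checks.
interface GenderResult { gender: string, confidence: number }

const obj: GenderResult = { gender: '', confidence: 0 };  // was { gender: undefined, confidence: undefined }

function describe(res: GenderResult): string {
  // safe to read both fields directly under strictNullChecks
  return res.gender ? `${res.gender} (${Math.round(100 * res.confidence)}%)` : 'unknown';
}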
@@ -1,6 +1,6 @@
 export const body = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ body: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     // raising hands
     const leftWrist = res[i].keypoints.find((a) => (a.part === 'leftWrist'));
@@ -20,7 +20,7 @@ export const body = (res) => {

 export const face = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ face: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (res[i].mesh && res[i].mesh.length > 0) {
       const eyeFacing = res[i].mesh[35][2] - res[i].mesh[263][2];
@@ -41,7 +41,7 @@ export const face = (res) => {

 export const iris = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ iris: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
     if (!res[i].annotations || !res[i].annotations.leftEyeIris || !res[i].annotations.rightEyeIris) continue;
     const sizeXLeft = res[i].annotations.leftEyeIris[3][0] - res[i].annotations.leftEyeIris[1][0];
@@ -60,10 +60,11 @@ export const iris = (res) => {

 export const hand = (res) => {
   if (!res) return [];
-  const gestures = [];
+  const gestures: Array<{ hand: number, gesture: string }> = [];
   for (let i = 0; i < res.length; i++) {
-    const fingers = [];
+    const fingers: Array<{ name: string, position: number }> = [];
     for (const [finger, pos] of Object.entries(res[i]['annotations'])) {
+      // @ts-ignore
       if (finger !== 'palmBase') fingers.push({ name: finger.toLowerCase(), position: pos[0] }); // get tip of each finger
     }
     if (fingers && fingers.length > 0) {
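Each gesture helper now returns an array with its own element type (`{ body, gesture }`, `{ face, gesture }`, `{ iris, gesture }`, `{ hand, gesture }`). A sketch of how a consumer could model the combined list that `human.ts` builds from them; the gesture strings here are only examples:

// The index field differs per category, so the combined list is naturally a union.
type Gesture =
  | { body: number, gesture: string }
  | { face: number, gesture: string }
  | { iris: number, gesture: string }
  | { hand: number, gesture: string };

const combined: Gesture[] = [
  { face: 0, gesture: 'facing camera' },
  { hand: 0, gesture: 'index finger forward' },
];

// narrow by checking which index field is present
for (const g of combined) {
  if ('hand' in g) console.log(`hand #${g.hand}: ${g.gesture}`);
}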
@@ -49,7 +49,7 @@ export class HandDetector {

     scoresT.dispose();
     filteredT.dispose();
-    const hands = [];
+    const hands: Array<{ box: any, palmLandmarks: any, confidence: number }> = [];
     for (const index of filtered) {
       if (scores[index] >= config.hand.minConfidence) {
         const matchingBox = tf.slice(boxes, [index, 0], [1, -1]);
@@ -70,7 +70,7 @@ export class HandDetector {
     const image = tf.tidy(() => input.resizeBilinear([config.hand.inputSize, config.hand.inputSize]).div(127.5).sub(1));
     const predictions = await this.getBoxes(image, config);
     image.dispose();
-    const hands = [];
+    const hands: Array<{}> = [];
     if (!predictions || predictions.length === 0) return hands;
     for (const prediction of predictions) {
       const boxes = prediction.box.dataSync();
@@ -91,7 +91,7 @@ export class HandPipeline {
       // for (const possible of boxes) this.storedBoxes.push(possible);
       if (this.storedBoxes.length > 0) useFreshBox = true;
     }
-    const hands = [];
+    const hands: Array<{}> = [];
     // log('hand', `skipped: ${this.skipped} max: ${config.hand.maxHands} detected: ${this.detectedHands} stored: ${this.storedBoxes.length} new: ${boxes?.length}`);

     // go through working set of boxes
@@ -124,10 +124,7 @@ export class HandPipeline {
         const result = {
           landmarks: coords,
           confidence,
-          box: {
-            topLeft: nextBoundingBox.startPoint,
-            bottomRight: nextBoundingBox.endPoint,
-          },
+          box: { topLeft: nextBoundingBox.startPoint, bottomRight: nextBoundingBox.endPoint },
         };
         hands.push(result);
       } else {
@@ -139,10 +136,7 @@ export class HandPipeline {
         const enlarged = box.enlargeBox(box.squarifyBox(currentBox), HAND_BOX_ENLARGE_FACTOR);
         const result = {
           confidence: currentBox.confidence,
-          box: {
-            topLeft: enlarged.startPoint,
-            bottomRight: enlarged.endPoint,
-          },
+          box: { topLeft: enlarged.startPoint, bottomRight: enlarged.endPoint },
         };
         hands.push(result);
       }
@@ -29,7 +29,7 @@ export class HandPose {
   async estimateHands(input, config) {
     const predictions = await this.handPipeline.estimateHands(input, config);
     if (!predictions) return [];
-    const hands = [];
+    const hands: Array<{ confidence: number, box: any, landmarks: any, annotations: any }> = [];
     for (const prediction of predictions) {
       const annotations = {};
       if (prediction.landmarks) {
@@ -43,12 +43,7 @@ export class HandPose {
         Math.min(input.shape[2], prediction.box.bottomRight[0]) - prediction.box.topLeft[0],
         Math.min(input.shape[1], prediction.box.bottomRight[1]) - prediction.box.topLeft[1],
       ] : 0;
-      hands.push({
-        confidence: prediction.confidence,
-        box,
-        landmarks: prediction.landmarks,
-        annotations,
-      });
+      hands.push({ confidence: prediction.confidence, box, landmarks: prediction.landmarks, annotations });
     }
     return hands;
   }
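The hand pipeline and `HandPose` now build their result objects as single-line literals. A sketch of an explicit interface for that shape, mirroring the `Array<{ confidence, box, landmarks, annotations }>` annotation above; the interface name and sample values are invented:

// Illustrative shape for the objects pushed into `hands` by the pipeline.
interface HandResult {
  confidence: number;
  box: { topLeft: [number, number], bottomRight: [number, number] };
  landmarks: Array<[number, number, number]>;
  annotations: Record<string, Array<[number, number, number]>>;
}

const hands: HandResult[] = [];
hands.push({
  confidence: 0.97,
  box: { topLeft: [120, 80], bottomRight: [260, 220] },
  landmarks: [[130, 90, 0]],
  annotations: { indexFinger: [[150, 110, 0]] },
});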
src/human.ts
@@ -236,7 +236,7 @@ class Human {
     let genderRes;
     let emotionRes;
     let embeddingRes;
-    const faceRes = [];
+    const faceRes: Array<{ confidence: number, box: any, mesh: any, meshRaw: any, boxRaw: any, annotations: any, age: number, gender: string, genderConfidence: number, emotion: string, embedding: any, iris: number }> = [];
     this.state = 'run:face';
     timeStamp = now();
     const faces = await this.models.face?.estimateFaces(input, this.config);
@@ -349,8 +349,8 @@ class Human {
     this.state = 'image';
     this.config = mergeDeep(this.config, userConfig);
     const process = image.process(input, this.config);
-    process.tensor.dispose();
-    return process.canvas;
+    process?.tensor?.dispose();
+    return process?.canvas;
   }

   // main detect function
@@ -445,6 +445,7 @@ class Human {
       let gestureRes = [];
       if (this.config.gesture.enabled) {
         timeStamp = now();
+        // @ts-ignore
        gestureRes = [...gesture.face(faceRes), ...gesture.body(poseRes), ...gesture.hand(handRes), ...gesture.iris(faceRes)];
        if (!this.config.async) this.perf.gesture = Math.trunc(now() - timeStamp);
        else if (this.perf.gesture) delete this.perf.gesture;
@@ -495,8 +496,8 @@ class Human {
       canvas.width = size;
       canvas.height = size;
       const ctx = canvas.getContext('2d');
-      ctx.drawImage(img, 0, 0);
-      const data = ctx.getImageData(0, 0, size, size);
+      ctx?.drawImage(img, 0, 0);
+      const data = ctx?.getImageData(0, 0, size, size);
       this.detect(data, this.config).then((res) => resolve(res));
     };
     if (src) img.src = src;
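The `human.ts` changes swap direct member access for optional chaining (`process?.tensor?.dispose()`, `ctx?.drawImage(...)`) so the code type-checks once `strictNullChecks` is enabled. A minimal sketch of the same idea against the DOM canvas API:

// getContext('2d') is typed as CanvasRenderingContext2D | null,
// so under strictNullChecks the call sites must account for null.
const canvas = document.createElement('canvas');
canvas.width = 256;
canvas.height = 256;

const ctx = canvas.getContext('2d');
ctx?.fillRect(0, 0, 256, 256);                   // no-op if ctx is null
const data = ctx?.getImageData(0, 0, 256, 256);  // ImageData | undefined
console.log(data?.width ?? 'no 2d context available');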
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 import { log } from './log';
 import * as tf from '../dist/tfjs.esm.js';
 import * as fxImage from './imagefx';
@@ -24,12 +24,14 @@ export function register() {
   if (!tf.findBackend(config.name)) {
     log('backend registration:', config.name);
     try {
+      // @ts-ignore
       config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
     } catch (err) {
       log('error: cannot create canvas:', err);
       return;
     }
     try {
+      // @ts-ignore
       config.gl = config.canvas.getContext('webgl2', config.webGLattr);
     } catch (err) {
       log('error: cannot get WebGL2 context:', err);
@@ -60,6 +62,7 @@ export function register() {
     }
     try {
       tf.ENV.set('WEBGL_VERSION', 2);
+      // @ts-ignore
       tf.ENV.set('WEBGL_MAX_TEXTURE_SIZE', config.gl.getParameter(config.gl.MAX_TEXTURE_SIZE));
       tf.ENV.set('WEBGL_FORCE_F16_TEXTURES', true);
       tf.ENV.set('WEBGL_PACK_DEPTHWISECONV', true);
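The backend registration adds `// @ts-ignore` comments around the `OffscreenCanvas` fallback and the WebGL2 parameter lookup. For comparison, a hedged sketch of the same canvas fallback written with an explicit union type; it assumes DOM typings that declare `OffscreenCanvas` and is only an illustration, not the project's code:

// Create an offscreen canvas when the worker/browser supports it, otherwise a DOM canvas.
function createBackendCanvas(width: number, height: number): OffscreenCanvas | HTMLCanvasElement {
  if (typeof OffscreenCanvas !== 'undefined') return new OffscreenCanvas(width, height);
  const canvas = document.createElement('canvas');
  canvas.width = width;
  canvas.height = height;
  return canvas;
}

// usage: both branches expose width/height, so no narrowing is needed here
const backendCanvas = createBackendCanvas(1024, 1024);
console.log('created canvas:', backendCanvas.width, 'x', backendCanvas.height);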
@@ -17,9 +17,9 @@
     "resolveJsonModule": true,
     "skipLibCheck": true,
     "sourceMap": false,
-    "strictNullChecks": false,
+    "strictNullChecks": true,
     "allowJs": true
   },
   "formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
-  "include": ["src/*", "demo/*"],
+  "include": ["src/*", "src/***/*", "demo/*"],
 }
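Flipping `strictNullChecks` to `true` is what drives most of the typing changes above: `null` and `undefined` stop being assignable to every type, so optional values have to be modeled explicitly. A tiny sketch of the difference:

// Under "strictNullChecks": false, null and undefined are assignable to anything:
//   const age: number = undefined;   // allowed
// With "strictNullChecks": true the same line is an error, so the missing-value
// case has to appear in the type and be narrowed before use.
let age: number | undefined;
age = 23;

function banner(a: number | undefined): string {
  return a === undefined ? 'age unknown' : `age: ${a}`;  // narrowing required before use
}
console.log(banner(age));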
wiki
@@ -1 +1 @@
-Subproject commit d88043901df1d671a0897275bae8d0f537b84ff5
+Subproject commit d365c4e487508181811ea7ff1a26d682ef6896b7