stricter linting, fix face annotations

Branch: pull/70/head
Author: Vladimir Mandic
Date: 2020-12-27 08:12:22 -05:00
Parent: 90a121a362
Commit: cdd13e9fd9
42 changed files with 258 additions and 325 deletions

demo/browser.js

@@ -252,7 +252,6 @@ function webWorker(input, image, canvas, timestamp) {
 // create new webworker and add event handler only once
 log('creating worker thread');
 worker = new Worker(ui.worker, { type: 'module' });
-console.log('worker', worker);
 // after receiving message from webworker, parse&draw results and send new frame for processing
 worker.addEventListener('message', (msg) => {
 if (msg.data.result.performance && msg.data.result.performance.total) ui.detectFPS.push(1000 / msg.data.result.performance.total);
@@ -270,7 +269,7 @@ function webWorker(input, image, canvas, timestamp) {
 });
 }
 // pass image data as arraybuffer to worker by reference to avoid copy
-worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, [image.data.buffer]);
+worker.postMessage({ image: image.data.buffer, width: canvas.width, height: canvas.height, userConfig }, 'Human', [image.data.buffer]);
 }
 // main processing function when input is webcam, can use direct invocation or web worker
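
The zero-copy hand-off described in the comment above relies on the standard transferable-objects form of Worker.postMessage, where the transfer list is the second argument; a minimal sketch of that pattern (the worker script path and the canvas holding the frame are assumptions):

// main thread: hand pixel data to a worker without copying;
// after the call, the transferred ArrayBuffer is no longer usable on this side
const worker = new Worker('worker.js', { type: 'module' }); // hypothetical script path
const ctx = canvas.getContext('2d'); // 'canvas' assumed to hold the current frame
const imageData = ctx.getImageData(0, 0, canvas.width, canvas.height);
worker.postMessage(
  { image: imageData.data.buffer, width: canvas.width, height: canvas.height },
  [imageData.data.buffer], // transfer list
);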

demo/gl-bench.js

@@ -1,3 +1,4 @@
+// @ts-nocheck
 /* eslint-disable max-len */
 // based on: https://github.com/munrocket/gl-bench

demo/menu.js

@@ -1,3 +1,5 @@
+// @ts-nocheck
 let instance = 0;
 let CSScreated = false;

demo/worker.js

@@ -24,6 +24,6 @@ onmessage = async (msg) => {
 }
 // must strip canvas from return value as it cannot be transfered from worker thread
 if (result.canvas) result.canvas = null;
-postMessage({ result });
+postMessage({ result }, 'Human');
 busy = false;
 };
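
On the worker side, the transferred buffer arrives as a plain ArrayBuffer and has to be rewrapped before detection; a minimal sketch of the receiving end, assuming a Human instance named 'human' is created inside the worker:

// worker thread: rebuild ImageData from the transferred buffer, run detection, reply
onmessage = async (msg) => {
  const image = new ImageData(new Uint8ClampedArray(msg.data.image), msg.data.width, msg.data.height);
  const result = await human.detect(image, msg.data.userConfig); // 'human' assumed
  if (result.canvas) result.canvas = null; // canvases cannot be structured-cloned back
  postMessage({ result });
};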

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
 "inputs": {
 "dist/human.esm.js": {
-"bytes": 1839826,
+"bytes": 1838836,
 "imports": []
 },
 "demo/draw.js": {
@@ -9,15 +9,15 @@
 "imports": []
 },
 "demo/menu.js": {
-"bytes": 13858,
+"bytes": 13874,
 "imports": []
 },
 "demo/gl-bench.js": {
-"bytes": 10782,
+"bytes": 10797,
 "imports": []
 },
 "demo/browser.js": {
-"bytes": 25480,
+"bytes": 25454,
 "imports": [
 {
 "path": "dist/human.esm.js"
@@ -38,14 +38,14 @@
 "dist/demo-browser-index.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 1870322
+"bytes": 1868174
 },
 "dist/demo-browser-index.js": {
 "imports": [],
 "exports": [],
 "inputs": {
 "dist/human.esm.js": {
-"bytesInOutput": 1832586
+"bytesInOutput": 1832304
 },
 "demo/draw.js": {
 "bytesInOutput": 7726
@@ -57,10 +57,10 @@
 "bytesInOutput": 7382
 },
 "demo/browser.js": {
-"bytesInOutput": 19563
+"bytesInOutput": 19542
 }
 },
-"bytes": 1886446
+"bytes": 1885439
 }
 }
 }
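
These vendored .json files are esbuild bundle metafiles, so each changed pair above is simply an old size followed by a new size. A small Node sketch for diffing two such metafiles (the file names here are hypothetical):

// compare per-input sizes between two esbuild metafiles
const fs = require('fs');
const before = JSON.parse(fs.readFileSync('human.esm.old.json', 'utf8'));
const after = JSON.parse(fs.readFileSync('human.esm.json', 'utf8'));
for (const name of Object.keys(after.inputs)) {
  const oldBytes = before.inputs[name] ? before.inputs[name].bytes : 0;
  const newBytes = after.inputs[name].bytes;
  if (oldBytes !== newBytes) console.log(`${name}: ${oldBytes} -> ${newBytes} bytes`);
}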

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.js vendored (22 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.esm.json vendored (54 lines changed)

@@ -9,7 +9,7 @@
 "imports": []
 },
 "src/tfjs/backend.js": {
-"bytes": 1340,
+"bytes": 1376,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
 ]
 },
 "src/face/blazeface.js": {
-"bytes": 6986,
+"bytes": 7024,
 "imports": [
 {
 "path": "src/log.js"
@@ -44,7 +44,7 @@
 "imports": []
 },
 "src/face/facepipeline.js": {
-"bytes": 14062,
+"bytes": 14306,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
 ]
 },
 "src/face/facemesh.js": {
-"bytes": 3054,
+"bytes": 2991,
 "imports": [
 {
 "path": "src/log.js"
@@ -92,7 +92,7 @@
 ]
 },
 "src/age/age.js": {
-"bytes": 2017,
+"bytes": 2037,
 "imports": [
 {
 "path": "src/log.js"
@@ -106,7 +106,7 @@
 ]
 },
 "src/gender/gender.js": {
-"bytes": 2886,
+"bytes": 2906,
 "imports": [
 {
 "path": "src/log.js"
@@ -120,7 +120,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 3055,
+"bytes": 3077,
 "imports": [
 {
 "path": "src/log.js"
@@ -134,7 +134,7 @@
 ]
 },
 "src/embedding/embedding.js": {
-"bytes": 2041,
+"bytes": 2063,
 "imports": [
 {
 "path": "src/log.js"
@@ -191,7 +191,7 @@
 ]
 },
 "src/body/decodePose.js": {
-"bytes": 5216,
+"bytes": 5368,
 "imports": [
 {
 "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
 ]
 },
 "src/body/decodeMultiple.js": {
-"bytes": 2303,
+"bytes": 2373,
 "imports": [
 {
 "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
 ]
 },
 "src/body/modelPoseNet.js": {
-"bytes": 2395,
+"bytes": 2519,
 "imports": [
 {
 "path": "src/log.js"
@@ -250,7 +250,7 @@
 ]
 },
 "src/body/posenet.js": {
-"bytes": 614,
+"bytes": 712,
 "imports": [
 {
 "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
 ]
 },
 "src/hand/box.js": {
-"bytes": 3226,
+"bytes": 2522,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
 ]
 },
 "src/hand/handdetector.js": {
-"bytes": 4253,
+"bytes": 3548,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
 ]
 },
 "src/hand/util.js": {
-"bytes": 3030,
+"bytes": 2346,
 "imports": []
 },
 "src/hand/handpipeline.js": {
-"bytes": 7951,
+"bytes": 7246,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
 "imports": []
 },
 "src/hand/handpose.js": {
-"bytes": 3250,
+"bytes": 2578,
 "imports": [
 {
 "path": "src/log.js"
@@ -332,11 +332,11 @@
 "imports": []
 },
 "src/imagefx.js": {
-"bytes": 19352,
+"bytes": 19445,
 "imports": []
 },
 "src/image.js": {
-"bytes": 5841,
+"bytes": 5871,
 "imports": [
 {
 "path": "src/log.js"
@@ -362,7 +362,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 16727,
+"bytes": 17375,
 "imports": [
 {
 "path": "src/log.js"
@@ -419,7 +419,7 @@
 "dist/human.esm.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 1776771
+"bytes": 1774476
 },
 "dist/human.esm.js": {
 "imports": [],
@@ -443,7 +443,7 @@
 "bytesInOutput": 9513
 },
 "src/face/facemesh.js": {
-"bytesInOutput": 2378
+"bytesInOutput": 2317
 },
 "src/profile.js": {
 "bytesInOutput": 846
@@ -503,16 +503,16 @@
 "bytesInOutput": 127032
 },
 "src/hand/handpose.js": {
-"bytesInOutput": 2022
+"bytesInOutput": 2018
 },
 "src/gesture/gesture.js": {
 "bytesInOutput": 2463
 },
 "src/imagefx.js": {
-"bytesInOutput": 13576
+"bytesInOutput": 13628
 },
 "src/image.js": {
-"bytesInOutput": 4041
+"bytesInOutput": 3637
 },
 "src/log.js": {
 "bytesInOutput": 266
@@ -524,7 +524,7 @@
 "bytesInOutput": 938
 },
 "src/human.js": {
-"bytesInOutput": 11039
+"bytesInOutput": 11170
 },
 "src/hand/box.js": {
 "bytesInOutput": 1473
@@ -542,7 +542,7 @@
 "bytesInOutput": 22
 }
 },
-"bytes": 1839826
+"bytes": 1838836
 }
 }
 }

dist/human.js vendored (22 lines changed)

File diff suppressed because one or more lines are too long

dist/human.js.map vendored (4 lines changed)

File diff suppressed because one or more lines are too long

dist/human.json vendored (54 lines changed)

@@ -9,7 +9,7 @@
 "imports": []
 },
 "src/tfjs/backend.js": {
-"bytes": 1340,
+"bytes": 1376,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
 ]
 },
 "src/face/blazeface.js": {
-"bytes": 6986,
+"bytes": 7024,
 "imports": [
 {
 "path": "src/log.js"
@@ -44,7 +44,7 @@
 "imports": []
 },
 "src/face/facepipeline.js": {
-"bytes": 14062,
+"bytes": 14306,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
 ]
 },
 "src/face/facemesh.js": {
-"bytes": 3054,
+"bytes": 2991,
 "imports": [
 {
 "path": "src/log.js"
@@ -92,7 +92,7 @@
 ]
 },
 "src/age/age.js": {
-"bytes": 2017,
+"bytes": 2037,
 "imports": [
 {
 "path": "src/log.js"
@@ -106,7 +106,7 @@
 ]
 },
 "src/gender/gender.js": {
-"bytes": 2886,
+"bytes": 2906,
 "imports": [
 {
 "path": "src/log.js"
@@ -120,7 +120,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 3055,
+"bytes": 3077,
 "imports": [
 {
 "path": "src/log.js"
@@ -134,7 +134,7 @@
 ]
 },
 "src/embedding/embedding.js": {
-"bytes": 2041,
+"bytes": 2063,
 "imports": [
 {
 "path": "src/log.js"
@@ -191,7 +191,7 @@
 ]
 },
 "src/body/decodePose.js": {
-"bytes": 5216,
+"bytes": 5368,
 "imports": [
 {
 "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
 ]
 },
 "src/body/decodeMultiple.js": {
-"bytes": 2303,
+"bytes": 2373,
 "imports": [
 {
 "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
 ]
 },
 "src/body/modelPoseNet.js": {
-"bytes": 2395,
+"bytes": 2519,
 "imports": [
 {
 "path": "src/log.js"
@@ -250,7 +250,7 @@
 ]
 },
 "src/body/posenet.js": {
-"bytes": 614,
+"bytes": 712,
 "imports": [
 {
 "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
 ]
 },
 "src/hand/box.js": {
-"bytes": 3226,
+"bytes": 2522,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
 ]
 },
 "src/hand/handdetector.js": {
-"bytes": 4253,
+"bytes": 3548,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
 ]
 },
 "src/hand/util.js": {
-"bytes": 3030,
+"bytes": 2346,
 "imports": []
 },
 "src/hand/handpipeline.js": {
-"bytes": 7951,
+"bytes": 7246,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
 "imports": []
 },
 "src/hand/handpose.js": {
-"bytes": 3250,
+"bytes": 2578,
 "imports": [
 {
 "path": "src/log.js"
@@ -332,11 +332,11 @@
 "imports": []
 },
 "src/imagefx.js": {
-"bytes": 19352,
+"bytes": 19445,
 "imports": []
 },
 "src/image.js": {
-"bytes": 5841,
+"bytes": 5871,
 "imports": [
 {
 "path": "src/log.js"
@@ -362,7 +362,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 16727,
+"bytes": 17375,
 "imports": [
 {
 "path": "src/log.js"
@@ -419,7 +419,7 @@
 "dist/human.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 1776778
+"bytes": 1774483
 },
 "dist/human.js": {
 "imports": [],
@@ -441,7 +441,7 @@
 "bytesInOutput": 9513
 },
 "src/face/facemesh.js": {
-"bytesInOutput": 2378
+"bytesInOutput": 2317
 },
 "src/profile.js": {
 "bytesInOutput": 846
@@ -501,19 +501,19 @@
 "bytesInOutput": 127032
 },
 "src/hand/handpose.js": {
-"bytesInOutput": 2022
+"bytesInOutput": 2018
 },
 "src/gesture/gesture.js": {
 "bytesInOutput": 2463
 },
 "src/imagefx.js": {
-"bytesInOutput": 13576
+"bytesInOutput": 13628
 },
 "src/image.js": {
-"bytesInOutput": 4040
+"bytesInOutput": 3636
 },
 "src/human.js": {
-"bytesInOutput": 11100
+"bytesInOutput": 11231
 },
 "src/log.js": {
 "bytesInOutput": 266
@@ -540,7 +540,7 @@
 "bytesInOutput": 22
 }
 },
-"bytes": 1839900
+"bytes": 1838910
 }
 }
 }

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.js vendored (22 lines changed)

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

dist/human.node.json vendored (58 lines changed)

@@ -9,7 +9,7 @@
 "imports": []
 },
 "src/tfjs/backend.js": {
-"bytes": 1340,
+"bytes": 1376,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -17,7 +17,7 @@
 ]
 },
 "src/face/blazeface.js": {
-"bytes": 6986,
+"bytes": 7024,
 "imports": [
 {
 "path": "src/log.js"
@@ -44,7 +44,7 @@
 "imports": []
 },
 "src/face/facepipeline.js": {
-"bytes": 14062,
+"bytes": 14306,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -64,7 +64,7 @@
 ]
 },
 "src/face/facemesh.js": {
-"bytes": 3054,
+"bytes": 2991,
 "imports": [
 {
 "path": "src/log.js"
@@ -92,7 +92,7 @@
 ]
 },
 "src/age/age.js": {
-"bytes": 2017,
+"bytes": 2037,
 "imports": [
 {
 "path": "src/log.js"
@@ -106,7 +106,7 @@
 ]
 },
 "src/gender/gender.js": {
-"bytes": 2886,
+"bytes": 2906,
 "imports": [
 {
 "path": "src/log.js"
@@ -120,7 +120,7 @@
 ]
 },
 "src/emotion/emotion.js": {
-"bytes": 3055,
+"bytes": 3077,
 "imports": [
 {
 "path": "src/log.js"
@@ -134,7 +134,7 @@
 ]
 },
 "src/embedding/embedding.js": {
-"bytes": 2041,
+"bytes": 2063,
 "imports": [
 {
 "path": "src/log.js"
@@ -191,7 +191,7 @@
 ]
 },
 "src/body/decodePose.js": {
-"bytes": 5216,
+"bytes": 5368,
 "imports": [
 {
 "path": "src/body/keypoints.js"
@@ -205,7 +205,7 @@
 ]
 },
 "src/body/decodeMultiple.js": {
-"bytes": 2303,
+"bytes": 2373,
 "imports": [
 {
 "path": "src/body/buildParts.js"
@@ -227,7 +227,7 @@
 ]
 },
 "src/body/modelPoseNet.js": {
-"bytes": 2395,
+"bytes": 2519,
 "imports": [
 {
 "path": "src/log.js"
@@ -250,7 +250,7 @@
 ]
 },
 "src/body/posenet.js": {
-"bytes": 614,
+"bytes": 712,
 "imports": [
 {
 "path": "src/body/modelPoseNet.js"
@@ -264,7 +264,7 @@
 ]
 },
 "src/hand/box.js": {
-"bytes": 3226,
+"bytes": 2522,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -272,7 +272,7 @@
 ]
 },
 "src/hand/handdetector.js": {
-"bytes": 4253,
+"bytes": 3548,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -283,11 +283,11 @@
 ]
 },
 "src/hand/util.js": {
-"bytes": 3030,
+"bytes": 2346,
 "imports": []
 },
 "src/hand/handpipeline.js": {
-"bytes": 7951,
+"bytes": 7246,
 "imports": [
 {
 "path": "dist/tfjs.esm.js"
@@ -308,7 +308,7 @@
 "imports": []
 },
 "src/hand/handpose.js": {
-"bytes": 3250,
+"bytes": 2578,
 "imports": [
 {
 "path": "src/log.js"
@@ -332,11 +332,11 @@
 "imports": []
 },
 "src/imagefx.js": {
-"bytes": 19352,
+"bytes": 19445,
 "imports": []
 },
 "src/image.js": {
-"bytes": 5841,
+"bytes": 5871,
 "imports": [
 {
 "path": "src/log.js"
@@ -362,7 +362,7 @@
 "imports": []
 },
 "src/human.js": {
-"bytes": 16727,
+"bytes": 17375,
 "imports": [
 {
 "path": "src/log.js"
@@ -419,7 +419,7 @@
 "dist/human.node-gpu.js.map": {
 "imports": [],
 "inputs": {},
-"bytes": 702698
+"bytes": 700374
 },
 "dist/human.node-gpu.js": {
 "imports": [],
@@ -444,7 +444,7 @@
 "bytesInOutput": 9563
 },
 "src/face/facemesh.js": {
-"bytesInOutput": 2421
+"bytesInOutput": 2360
 },
 "src/profile.js": {
 "bytesInOutput": 846
@@ -495,28 +495,28 @@
 "bytesInOutput": 634
 },
 "src/hand/handdetector.js": {
-"bytesInOutput": 2880
+"bytesInOutput": 2876
 },
 "src/hand/handpipeline.js": {
-"bytesInOutput": 4559
+"bytesInOutput": 4555
 },
 "src/hand/anchors.js": {
 "bytesInOutput": 127034
 },
 "src/hand/handpose.js": {
-"bytesInOutput": 2064
+"bytesInOutput": 2060
 },
 "src/gesture/gesture.js": {
 "bytesInOutput": 2467
 },
 "src/imagefx.js": {
-"bytesInOutput": 13558
+"bytesInOutput": 13610
 },
 "src/image.js": {
-"bytesInOutput": 4069
+"bytesInOutput": 3665
 },
 "src/human.js": {
-"bytesInOutput": 11126
+"bytesInOutput": 11257
 },
 "src/log.js": {
 "bytesInOutput": 266
@@ -540,7 +540,7 @@
 "bytesInOutput": 20
 }
 },
-"bytes": 310199
+"bytes": 309201
 }
 }
 }

src/age/age.js

@@ -46,6 +46,7 @@ async function predict(image, config) {
 const profileAge = config.face.age.enabled ? await tf.profile(() => models.age.predict(enhance)) : {};
 ageT = profileAge.result.clone();
 profileAge.result.dispose();
+// @ts-ignore
 profile.run('age', profileAge);
 }
 enhance.dispose();
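
The tf.profile() wrapper used here returns memory and kernel statistics alongside the result tensor; a minimal usage sketch (the model and input tensor are assumptions):

// sketch: profile a single prediction and inspect the collected stats
const info = await tf.profile(() => model.predict(input)); // 'model' and 'input' assumed
console.log('new bytes:', info.newBytes, 'peak bytes:', info.peakBytes, 'kernels:', info.kernels.length);
info.result.dispose(); // the profiled result tensor still has to be disposed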

src/body/decodeMultiple.js

@@ -7,6 +7,7 @@ const kLocalMaximumRadius = 1;
 function withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, { x, y }, keypointId) {
 return poses.some(({ keypoints }) => {
 const correspondingKeypoint = keypoints[keypointId].position;
+// @ts-ignore
 return vectors.squaredDistance(y, x, correspondingKeypoint.y, correspondingKeypoint.x) <= squaredNmsRadius;
 });
 }
@@ -21,6 +22,7 @@ function getInstanceScore(existingPoses, squaredNmsRadius, instanceKeypoints) {
 function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, config) {
 const poses = [];
+// @ts-ignore
 const queue = buildParts.buildPartWithScoreQueue(config.body.scoreThreshold, kLocalMaximumRadius, scoresBuffer);
 const squaredNmsRadius = config.body.nmsRadius ^ 2;
 // Generate at most maxDetections object instances per image in decreasing root part score order.
@@ -28,9 +30,11 @@ function decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer
 // The top element in the queue is the next root candidate.
 const root = queue.dequeue();
 // Part-based non-maximum suppression: We reject a root candidate if it is within a disk of `nmsRadius` pixels from the corresponding part of a previously detected instance.
+// @ts-ignore
 const rootImageCoords = vectors.getImageCoords(root.part, config.body.outputStride, offsetsBuffer);
 if (withinNmsRadiusOfCorrespondingPoint(poses, squaredNmsRadius, rootImageCoords, root.part.id)) continue;
 // Else start a new detection instance at the position of the root.
+// @ts-ignore
 const keypoints = decodePose.decodePose(root, scoresBuffer, offsetsBuffer, config.body.outputStride, displacementsFwdBuffer, displacementsBwdBuffer);
 const score = getInstanceScore(poses, squaredNmsRadius, keypoints);
 if (score > config.body.scoreThreshold) poses.push({ keypoints, score });
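
The part-based NMS described in the comment reduces to a squared-distance test against already-accepted poses; a self-contained sketch of that check (note it squares the radius with **, since ^ is bitwise XOR in JavaScript):

// sketch: reject a root candidate that lands within nmsRadius pixels of the
// same keypoint in any previously detected pose
function withinNmsRadius(poses, nmsRadius, { x, y }, keypointId) {
  const squaredNmsRadius = nmsRadius ** 2;
  return poses.some(({ keypoints }) => {
    const p = keypoints[keypointId].position;
    return (p.x - x) ** 2 + (p.y - y) ** 2 <= squaredNmsRadius;
  });
}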

src/body/decodePose.js

@@ -14,7 +14,9 @@ function getDisplacement(edgeId, point, displacements) {
 }
 function getStridedIndexNearPoint(point, outputStride, height, width) {
 return {
+// @ts-ignore
 y: vectors.clamp(Math.round(point.y / outputStride), 0, height - 1),
+// @ts-ignore
 x: vectors.clamp(Math.round(point.x / outputStride), 0, width - 1),
 };
 }
@@ -24,11 +26,14 @@ function traverseToTargetKeypoint(edgeId, sourceKeypoint, targetKeypointId, scor
 // Nearest neighbor interpolation for the source->target displacements.
 const sourceKeypointIndices = getStridedIndexNearPoint(sourceKeypoint.position, outputStride, height, width);
 const displacement = getDisplacement(edgeId, sourceKeypointIndices, displacements);
+// @ts-ignore
 const displacedPoint = vectors.addVectors(sourceKeypoint.position, displacement);
 let targetKeypoint = displacedPoint;
 for (let i = 0; i < offsetRefineStep; i++) {
 const targetKeypointIndices = getStridedIndexNearPoint(targetKeypoint, outputStride, height, width);
+// @ts-ignore
 const offsetPoint = vectors.getOffsetPoint(targetKeypointIndices.y, targetKeypointIndices.x, targetKeypointId, offsets);
+// @ts-ignore
 targetKeypoint = vectors.addVectors({
 x: targetKeypointIndices.x * outputStride,
 y: targetKeypointIndices.y * outputStride,
@@ -45,6 +50,7 @@ function decodePose(root, scores, offsets, outputStride, displacementsFwd, displ
 const instanceKeypoints = new Array(numParts);
 // Start a new detection instance at the position of the root.
 const { part: rootPart, score: rootScore } = root;
+// @ts-ignore
 const rootPoint = vectors.getImageCoords(rootPart, outputStride, offsets);
 instanceKeypoints[rootPart.id] = {
 score: rootScore,
@@ -73,13 +79,16 @@ exports.decodePose = decodePose;
 async function decodeSinglePose(heatmapScores, offsets, config) {
 let totalScore = 0.0;
+// @ts-ignore
 const heatmapValues = decoders.argmax2d(heatmapScores);
 const allTensorBuffers = await Promise.all([heatmapScores.buffer(), offsets.buffer(), heatmapValues.buffer()]);
 const scoresBuffer = allTensorBuffers[0];
 const offsetsBuffer = allTensorBuffers[1];
 const heatmapValuesBuffer = allTensorBuffers[2];
+// @ts-ignore
 const offsetPoints = decoders.getOffsetPoints(heatmapValuesBuffer, config.body.outputStride, offsetsBuffer);
 const offsetPointsBuffer = await offsetPoints.buffer();
+// @ts-ignore
 const keypointConfidence = Array.from(decoders.getPointsConfidence(scoresBuffer, heatmapValuesBuffer));
 const instanceKeypoints = keypointConfidence.map((score, i) => {
 totalScore += score;
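
The nearest-neighbor lookup above snaps an image-space point onto the model's strided heatmap grid; a self-contained sketch of that mapping:

// sketch: map an image-space point to the nearest heatmap cell for a given output stride
const clamp = (v, min, max) => Math.min(Math.max(v, min), max);
function nearestGridPoint(point, outputStride, height, width) {
  return {
    y: clamp(Math.round(point.y / outputStride), 0, height - 1),
    x: clamp(Math.round(point.x / outputStride), 0, width - 1),
  };
}
// e.g. with outputStride 16, the point { x: 100, y: 50 } maps to cell { x: 6, y: 3 }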

src/body/modelPoseNet.js

@@ -9,12 +9,15 @@ async function estimateMultiple(input, res, config) {
 return new Promise(async (resolve) => {
 const height = input.shape[1];
 const width = input.shape[2];
+// @ts-ignore
 const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);
 const scoresBuffer = allTensorBuffers[0];
 const offsetsBuffer = allTensorBuffers[1];
 const displacementsFwdBuffer = allTensorBuffers[2];
 const displacementsBwdBuffer = allTensorBuffers[3];
+// @ts-ignore
 const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, config);
+// @ts-ignore
 const scaled = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);
 resolve(scaled);
 });
@@ -24,8 +27,10 @@ async function estimateSingle(input, res, config) {
 return new Promise(async (resolve) => {
 const height = input.shape[1];
 const width = input.shape[2];
+// @ts-ignore
 const pose = await decodePose.decodeSinglePose(res.heatmapScores, res.offsets, config);
 const poses = [pose];
+// @ts-ignore
 const scaled = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputSize, config.body.inputSize]);
 resolve(scaled);
 });
@@ -37,6 +42,7 @@ class PoseNet {
 }
 async estimatePoses(input, config) {
+// @ts-ignore
 const resized = util.resizeTo(input, [config.body.inputSize, config.body.inputSize]);
 const res = this.baseModel.predict(resized, config);
@@ -59,6 +65,7 @@ exports.PoseNet = PoseNet;
 async function load(config) {
 const model = await tf.loadGraphModel(config.body.modelPath);
+// @ts-ignore
 const mobilenet = new modelBase.BaseModel(model);
 log(`load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
 return new PoseNet(mobilenet);
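
Pose decoding runs on CPU-side buffers rather than tensors, so the model outputs are downloaded once up front; a minimal sketch of that pattern:

// sketch: download several tensors into CPU-side TensorBuffers in one await,
// then index them synchronously during decoding
const [scoresBuffer, offsetsBuffer] = await Promise.all([
  res.heatmapScores.buffer(), // 'res' assumed to be the model output above
  res.offsets.buffer(),
]);
const score = scoresBuffer.get(0, 0, 0); // synchronous CPU read, no GPU round-trip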

src/body/posenet.js

@@ -2,15 +2,22 @@ import * as modelPoseNet from './modelPoseNet';
 import * as keypoints from './keypoints';
 import * as util from './util';
+// @ts-ignore
 exports.load = modelPoseNet.load;
+// @ts-ignore
 exports.PoseNet = modelPoseNet.PoseNet;
 exports.partChannels = keypoints.partChannels;
 exports.partIds = keypoints.partIds;
 exports.partNames = keypoints.partNames;
 exports.poseChain = keypoints.poseChain;
+// @ts-ignore
 exports.getAdjacentKeyPoints = util.getAdjacentKeyPoints;
+// @ts-ignore
 exports.getBoundingBox = util.getBoundingBox;
+// @ts-ignore
 exports.getBoundingBoxPoints = util.getBoundingBoxPoints;
+// @ts-ignore
 exports.scaleAndFlipPoses = util.scaleAndFlipPoses;
+// @ts-ignore
 exports.scalePose = util.scalePose;

src/embedding/embedding.js

@@ -39,6 +39,7 @@ async function predict(image, config) {
 const profileData = await tf.profile(() => models.embedding.predict({ img_inputs: resize }));
 data = [...profileData.result.dataSync()];
 profileData.result.dispose();
+// @ts-ignore
 profile.run('emotion', profileData);
 }
 }

src/emotion/emotion.js

@@ -65,6 +65,7 @@ async function predict(image, config) {
 const profileData = await tf.profile(() => models.emotion.predict(normalize));
 data = profileData.result.dataSync();
 profileData.result.dispose();
+// @ts-ignore
 profile.run('emotion', profileData);
 }
 for (let i = 0; i < data.length; i++) {

src/face/blazeface.js

@@ -134,11 +134,13 @@ class BlazeFaceModel {
 }
 async estimateFaces(input) {
+// @ts-ignore
 const { boxes, scaleFactor } = await this.getBoundingBoxes(input);
 const faces = [];
 for (const face of boxes) {
 const landmarkData = face.landmarks.arraySync();
 const scaledBox = scaleBoxFromPrediction(face, scaleFactor);
+// @ts-ignore
 const boxData = scaleBox.arraySync();
 const probabilityData = face.probability.arraySync();
 const anchor = face.anchor;

src/face/facemesh.js

@@ -6,6 +6,7 @@ import * as coords from './coords.js';
 class MediaPipeFaceMesh {
 constructor(blazeFace, blazeMeshModel, irisModel, config) {
+// @ts-ignore
 this.facePipeline = new facepipeline.Pipeline(blazeFace, blazeMeshModel, irisModel, config);
 this.config = config;
 }
@@ -19,21 +20,17 @@
 const meshRaw = prediction.rawCoords;
 const annotations = {};
 if (mesh && mesh.length > 0) {
-for (let key = 0; key < coords.MESH_ANNOTATIONS.length; key++) {
-if (config.face.iris.enabled || key.includes('Iris') === false) {
+for (const key of Object.keys(coords.MESH_ANNOTATIONS)) {
 annotations[key] = coords.MESH_ANNOTATIONS[key].map((index) => mesh[index]);
 }
-}
 }
 const boxRaw = (config.face.mesh.returnRawData && prediction.box) ? { topLeft: prediction.box.startPoint, bottomRight: prediction.box.endPoint } : null;
 const box = prediction.box ? [
 Math.max(0, prediction.box.startPoint[0]),
 Math.max(0, prediction.box.startPoint[1]),
 Math.min(input.shape[2], prediction.box.endPoint[0]) - prediction.box.startPoint[0],
 Math.min(input.shape[1], prediction.box.endPoint[1]) - prediction.box.startPoint[1],
 ] : 0;
 results.push({
 confidence: prediction.confidence || 0,
 box,
@@ -53,6 +50,7 @@
 let faceModels = [null, null, null];
 async function load(config) {
 faceModels = await Promise.all([
+// @ts-ignore
 (!faceModels[0] && config.face.enabled) ? blazeface.load(config) : null,
 (!faceModels[1] && config.face.mesh.enabled) ? tf.loadGraphModel(config.face.mesh.modelPath, { fromTFHub: config.face.mesh.modelPath.includes('tfhub.dev') }) : null,
 (!faceModels[2] && config.face.iris.enabled) ? tf.loadGraphModel(config.face.iris.modelPath, { fromTFHub: config.face.iris.modelPath.includes('tfhub.dev') }) : null,
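
This is the face-annotation fix named in the commit title: MESH_ANNOTATIONS is a map of annotation names to landmark indices, so iterating it with a numeric counter (where key.includes('Iris') could never match on a number) produced no annotations, while iterating its keys does. A condensed sketch of the corrected mapping, assuming mesh is the array of [x, y, z] landmarks:

// sketch: resolve named annotations (e.g. 'leftEyeIris', 'lipsUpperOuter') to points
const annotations = {};
for (const key of Object.keys(MESH_ANNOTATIONS)) { // MESH_ANNOTATIONS: { name: [indices] }
  annotations[key] = MESH_ANNOTATIONS[key].map((index) => mesh[index]);
}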

src/face/facepipeline.js

@@ -56,6 +56,7 @@ class Pipeline {
 }
 transformRawCoords(rawCoords, box, angle, rotationMatrix) {
+// @ts-ignore
 const boxSize = bounding.getBoxSize({ startPoint: box.startPoint, endPoint: box.endPoint });
 const scaleFactor = [boxSize[0] / this.meshWidth, boxSize[1] / this.meshHeight];
 const coordsScaled = rawCoords.map((coord) => ([
@@ -65,6 +66,7 @@
 const coordsRotationMatrix = (angle !== 0) ? util.buildRotationMatrix(angle, [0, 0]) : util.IDENTITY_MATRIX;
 const coordsRotated = (angle !== 0) ? coordsScaled.map((coord) => ([...util.rotatePoint(coord, coordsRotationMatrix), coord[2]])) : coordsScaled;
 const inverseRotationMatrix = (angle !== 0) ? util.invertTransformMatrix(rotationMatrix) : util.IDENTITY_MATRIX;
+// @ts-ignore
 const boxCenter = [...bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint }), 1];
 return coordsRotated.map((coord) => ([
 coord[0] + util.dot(boxCenter, inverseRotationMatrix[0]),
@@ -81,7 +83,9 @@
 // Returns a box describing a cropped region around the eye fit for passing to the iris model.
 getEyeBox(rawCoords, face, eyeInnerCornerIndex, eyeOuterCornerIndex, flip = false) {
+// @ts-ignore
 const box = bounding.squarifyBox(bounding.enlargeBox(this.calculateLandmarksBoundingBox([rawCoords[eyeInnerCornerIndex], rawCoords[eyeOuterCornerIndex]]), this.irisEnlarge));
+// @ts-ignore
 const boxSize = bounding.getBoxSize(box);
 let crop = tf.image.cropAndResize(face, [[
 box.startPoint[1] / this.meshHeight,
@@ -155,8 +159,11 @@
 return null;
 }
 for (let i = 0; i < this.storedBoxes.length; i++) {
+// @ts-ignore
 const scaledBox = bounding.scaleBoxCoordinates({ startPoint: this.storedBoxes[i].startPoint, endPoint: this.storedBoxes[i].endPoint }, detector.scaleFactor);
+// @ts-ignore
 const enlargedBox = bounding.enlargeBox(scaledBox);
+// @ts-ignore
 const squarifiedBox = bounding.squarifyBox(enlargedBox);
 const landmarks = this.storedBoxes[i].landmarks.arraySync();
 const confidence = this.storedBoxes[i].confidence;
@@ -181,14 +188,17 @@
 if (config.face.detector.rotation) {
 const [indexOfMouth, indexOfForehead] = (box.landmarks.length >= LANDMARKS_COUNT) ? MESH_KEYPOINTS_LINE_OF_SYMMETRY_INDICES : BLAZEFACE_KEYPOINTS_LINE_OF_SYMMETRY_INDICES;
 angle = util.computeRotation(box.landmarks[indexOfMouth], box.landmarks[indexOfForehead]);
+// @ts-ignore
 const faceCenter = bounding.getBoxCenter({ startPoint: box.startPoint, endPoint: box.endPoint });
 const faceCenterNormalized = [faceCenter[0] / input.shape[2], faceCenter[1] / input.shape[1]];
 const rotatedImage = tf.image.rotateWithOffset(input, angle, 0, faceCenterNormalized);
 rotationMatrix = util.buildRotationMatrix(-angle, faceCenter);
+// @ts-ignore
 face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, rotatedImage, [this.meshHeight, this.meshWidth]).div(255);
 } else {
 rotationMatrix = util.IDENTITY_MATRIX;
 const cloned = input.clone();
+// @ts-ignore
 face = bounding.cutBoxFromImageAndResize({ startPoint: box.startPoint, endPoint: box.endPoint }, cloned, [this.meshHeight, this.meshWidth]).div(255);
 }
@@ -235,7 +245,9 @@
 }
 const transformedCoordsData = this.transformRawCoords(rawCoords, box, angle, rotationMatrix);
+// @ts-ignore
 const landmarksBox = bounding.enlargeBox(this.calculateLandmarksBoundingBox(transformedCoordsData));
+// @ts-ignore
 const squarifiedLandmarksBox = bounding.squarifyBox(landmarksBox);
 const transformedCoords = tf.tensor2d(transformedCoordsData);
 const prediction = {

src/gender/gender.js

@@ -53,6 +53,7 @@ async function predict(image, config) {
 const profileGender = config.face.gender.enabled ? await tf.profile(() => models.gender.predict(enhance)) : {};
 genderT = profileGender.result.clone();
 profileGender.result.dispose();
+// @ts-ignore
 profile.run('gender', profileGender);
 }
 enhance.dispose();

src/hand/box.js

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 import * as tf from '../../dist/tfjs.esm.js';
 function getBoxSize(box) {

src/hand/handdetector.js

@@ -1,20 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';

src/hand/handpipeline.js

@@ -1,20 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 import * as tf from '../../dist/tfjs.esm.js';
 import * as box from './box';
 import * as util from './util';

src/hand/handpose.js

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 // https://storage.googleapis.com/tfjs-models/demos/handpose/index.html
 import { log } from '../log.js';
@@ -74,7 +58,9 @@ async function load(config) {
 config.hand.enabled ? tf.loadGraphModel(config.hand.detector.modelPath, { fromTFHub: config.hand.detector.modelPath.includes('tfhub.dev') }) : null,
 config.hand.landmarks ? tf.loadGraphModel(config.hand.skeleton.modelPath, { fromTFHub: config.hand.skeleton.modelPath.includes('tfhub.dev') }) : null,
 ]);
+// @ts-ignore
 const handDetector = new handdetector.HandDetector(handDetectorModel, config.hand.inputSize, anchors.anchors);
+// @ts-ignore
 const handPipeline = new handpipeline.HandPipeline(handDetector, handPoseModel, config.hand.inputSize);
 const handPose = new HandPose(handPipeline);
 if (config.hand.enabled) log(`load model: ${config.hand.detector.modelPath.match(/\/(.*)\./)[1]}`);

src/hand/util.js

@@ -1,19 +1,3 @@
-/**
- * @license
- * Copyright 2020 Google LLC. All Rights Reserved.
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- * https://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- * =============================================================================
- */
 function normalizeRadians(angle) {
 return angle - 2 * Math.PI * Math.floor((angle + Math.PI) / (2 * Math.PI));
 }
@@ -42,6 +26,7 @@ function multiplyTransformMatrices(mat1, mat2) {
 for (let row = 0; row < size; row++) {
 product.push([]);
 for (let col = 0; col < size; col++) {
+// @ts-ignore
 product[row].push(dot(mat1[row], getColumnFrom2DArr(mat2, col)));
 }
 }

src/human.js

@@ -18,6 +18,7 @@ import * as app from '../package.json';
 // helper function: gets elapsed time on both browser and nodejs
 const now = () => {
 if (typeof performance !== 'undefined') return performance.now();
+// @ts-ignore
 return parseInt(Number(process.hrtime.bigint()) / 1000 / 1000);
 };
@@ -72,6 +73,7 @@ class Human {
 }
 profile() {
+// @ts-ignore
 if (this.config.profile) return profile.data;
 return {};
 }
@@ -102,6 +104,7 @@
 }
 simmilarity(embedding1, embedding2) {
+// @ts-ignore
 if (this.config.face.embedding.enabled) return embedding.simmilarity(embedding1, embedding2);
 return 0;
 }
@@ -132,21 +135,35 @@
 this.models.posenet,
 this.models.handpose,
 ] = await Promise.all([
+// @ts-ignore
 this.models.facemesh || (this.config.face.enabled ? facemesh.load(this.config) : null),
+// @ts-ignore
 this.models.age || ((this.config.face.enabled && this.config.face.age.enabled) ? age.load(this.config) : null),
+// @ts-ignore
 this.models.gender || ((this.config.face.enabled && this.config.face.gender.enabled) ? gender.load(this.config) : null),
+// @ts-ignore
 this.models.emotion || ((this.config.face.enabled && this.config.face.emotion.enabled) ? emotion.load(this.config) : null),
+// @ts-ignore
 this.models.embedding || ((this.config.face.enabled && this.config.face.embedding.enabled) ? embedding.load(this.config) : null),
+// @ts-ignore
 this.models.posenet || (this.config.body.enabled ? posenet.load(this.config) : null),
+// @ts-ignore
 this.models.handpose || (this.config.hand.enabled ? handpose.load(this.config) : null),
 ]);
 } else {
+// @ts-ignore
 if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config);
+// @ts-ignore
 if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
+// @ts-ignore
 if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
+// @ts-ignore
 if (this.config.face.enabled && this.config.face.emotion.enabled && !this.models.emotion) this.models.emotion = await emotion.load(this.config);
+// @ts-ignore
 if (this.config.face.enabled && this.config.face.embedding.enabled && !this.models.embedding) this.models.embedding = await embedding.load(this.config);
+// @ts-ignore
 if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
+// @ts-ignore
 if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config);
 }
 const current = Math.trunc(now() - timeStamp);
@@ -213,7 +230,8 @@
 const faceRes = [];
 this.state = 'run:face';
 timeStamp = now();
-const faces = await this.models.facemesh.estimateFaces(input, this.config);
+// @ts-ignore
+const faces = await this.models.facemesh?.estimateFaces(input, this.config);
 this.perf.face = Math.trunc(now() - timeStamp);
 for (const face of faces) {
 this.analyze('Get Face');
@@ -227,10 +245,12 @@
 // run age, inherits face from blazeface
 this.analyze('Start Age:');
 if (this.config.async) {
+// @ts-ignore
 ageRes = this.config.face.age.enabled ? age.predict(face.image, this.config) : {};
 } else {
 this.state = 'run:age';
 timeStamp = now();
+// @ts-ignore
 ageRes = this.config.face.age.enabled ? await age.predict(face.image, this.config) : {};
 this.perf.age = Math.trunc(now() - timeStamp);
 }
@@ -238,10 +258,12 @@
 // run gender, inherits face from blazeface
 this.analyze('Start Gender:');
 if (this.config.async) {
+// @ts-ignore
 genderRes = this.config.face.gender.enabled ? gender.predict(face.image, this.config) : {};
 } else {
 this.state = 'run:gender';
 timeStamp = now();
+// @ts-ignore
 genderRes = this.config.face.gender.enabled ? await gender.predict(face.image, this.config) : {};
 this.perf.gender = Math.trunc(now() - timeStamp);
 }
@@ -249,10 +271,12 @@
 // run emotion, inherits face from blazeface
 this.analyze('Start Emotion:');
 if (this.config.async) {
+// @ts-ignore
 emotionRes = this.config.face.emotion.enabled ? emotion.predict(face.image, this.config) : {};
 } else {
 this.state = 'run:emotion';
 timeStamp = now();
+// @ts-ignore
 emotionRes = this.config.face.emotion.enabled ? await emotion.predict(face.image, this.config) : {};
 this.perf.emotion = Math.trunc(now() - timeStamp);
 }
@@ -261,10 +285,12 @@
 // run emotion, inherits face from blazeface
 this.analyze('Start Embedding:');
 if (this.config.async) {
+// @ts-ignore
 embeddingRes = this.config.face.embedding.enabled ? embedding.predict(face.image, this.config) : {};
 } else {
 this.state = 'run:embedding';
 timeStamp = now();
+// @ts-ignore
 embeddingRes = this.config.face.embedding.enabled ? await embedding.predict(face.image, this.config) : {};
 this.perf.embedding = Math.trunc(now() - timeStamp);
 }
@@ -291,7 +317,6 @@
 confidence: face.confidence,
 box: face.box,
 mesh: face.mesh,
-// AT: boxRaw, meshRaw
 boxRaw: face.boxRaw,
 meshRaw: face.meshRaw,
 annotations: face.annotations,
@@ -317,6 +342,7 @@
 async image(input, userConfig = {}) {
 this.state = 'image';
 this.config = mergeDeep(this.config, userConfig);
+// @ts-ignore
 const process = image.process(input, this.config);
 process.tensor.dispose();
 return process.canvas;
@@ -356,6 +382,7 @@
 this.analyze('Start Scope:');
 timeStamp = now();
+// @ts-ignore
 const process = image.process(input, this.config);
 if (!process || !process.tensor) {
 log('could not convert input to tensor');
@@ -379,12 +406,14 @@
 // run posenet
 this.analyze('Start Body:');
 if (this.config.async) {
-poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];
+// @ts-ignore
+poseRes = this.config.body.enabled ? this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
 if (this.perf.body) delete this.perf.body;
 } else {
 this.state = 'run:body';
 timeStamp = now();
-poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];
+// @ts-ignore
+poseRes = this.config.body.enabled ? await this.models.posenet?.estimatePoses(process.tensor, this.config) : [];
 this.perf.body = Math.trunc(now() - timeStamp);
 }
 this.analyze('End Body:');
@@ -392,12 +421,14 @@
 // run handpose
 this.analyze('Start Hand:');
 if (this.config.async) {
-handRes = this.config.hand.enabled ? this.models.handpose.estimateHands(process.tensor, this.config) : [];
+// @ts-ignore
+handRes = this.config.hand.enabled ? this.models.handpose?.estimateHands(process.tensor, this.config) : [];
 if (this.perf.hand) delete this.perf.hand;
 } else {
 this.state = 'run:hand';
 timeStamp = now();
-handRes = this.config.hand.enabled ? await this.models.handpose.estimateHands(process.tensor, this.config) : [];
+// @ts-ignore
+handRes = this.config.hand.enabled ? await this.models.handpose?.estimateHands(process.tensor, this.config) : [];
 this.perf.hand = Math.trunc(now() - timeStamp);
 }
 // this.analyze('End Hand:');
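
The config.async branches above implement a simple scheduling choice: in async mode each per-model call returns an un-awaited promise so the models run concurrently, while in sync mode each call is awaited so a per-model timing can be recorded. A condensed sketch of the idea (face, config, perf and the model modules are assumptions):

// sketch: parallel vs sequential scheduling of per-face models
let ageRes;
let genderRes;
if (config.async) {
  // start both, await together; no per-model timing
  [ageRes, genderRes] = await Promise.all([
    age.predict(face.image, config),
    gender.predict(face.image, config),
  ]);
} else {
  // await one at a time, measuring each stage
  let t = performance.now();
  ageRes = await age.predict(face.image, config);
  perf.age = Math.trunc(performance.now() - t);
  t = performance.now();
  genderRes = await gender.predict(face.image, config);
  perf.gender = Math.trunc(performance.now() - t);
}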

src/image.js

@@ -41,6 +41,7 @@ function process(input, config) {
 if (outCanvas.height !== inCanvas.height) outCanvas.height = inCanvas.height;
 this.fx = tf.ENV.flags.IS_BROWSER ? new fxImage.Canvas({ canvas: outCanvas }) : null; // && (typeof document !== 'undefined')
 }
+if (!this.fx) return inCanvas;
 this.fx.reset();
 this.fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
 if (config.filter.contrast !== 0) this.fx.addFilter('contrast', config.filter.contrast);
@@ -58,8 +59,8 @@
 if (config.filter.pixelate !== 0) this.fx.addFilter('pixelate', config.filter.pixelate);
 this.fx.apply(inCanvas);
 // read pixel data
-// const gl = outCanvas.getContext('webgl');
-const gl = false;
+/*
+const gl = outCanvas.getContext('webgl');
 if (gl) {
 const glBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 4);
 const pixBuffer = new Uint8Array(outCanvas.width * outCanvas.height * 3);
@@ -77,6 +78,7 @@
 }
 outCanvas.data = pixBuffer;
 }
+*/
 } else {
 outCanvas = inCanvas;
 }
@@ -93,8 +95,8 @@
 tempCanvas.width = targetWidth;
 tempCanvas.height = targetHeight;
 const tempCtx = tempCanvas.getContext('2d');
-tempCtx.drawImage(outCanvas, 0, 0);
-const data = tempCtx.getImageData(0, 0, targetWidth, targetHeight);
+tempCtx?.drawImage(outCanvas, 0, 0);
+const data = tempCtx?.getImageData(0, 0, targetWidth, targetHeight);
 pixels = tf.browser.fromPixels(data);
 }
 const casted = pixels.toFloat();
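
The fallback path above converts canvas pixels into a tensor through a 2d context; a self-contained sketch of that conversion (the source canvas and target dimensions are assumptions):

// sketch: read pixels from a canvas and build an int32 tensor of shape [height, width, 3]
const tempCanvas = (typeof OffscreenCanvas !== 'undefined')
  ? new OffscreenCanvas(width, height) // 'width' and 'height' assumed
  : document.createElement('canvas');
tempCanvas.width = width;
tempCanvas.height = height;
const tempCtx = tempCanvas.getContext('2d');
tempCtx.drawImage(sourceCanvas, 0, 0); // 'sourceCanvas' assumed
const imageData = tempCtx.getImageData(0, 0, width, height);
const pixels = tf.browser.fromPixels(imageData);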

src/imagefx.js

@@ -20,6 +20,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
 gl.compileShader(shader);
 if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
+// @ts-ignore
 throw new Error('Filter: GL compile failed', gl.getShaderInfoLog(shader));
 }
 return shader;
@@ -37,6 +38,7 @@ const WebGLProgram = function (gl, vertexSource, fragmentSource) {
 gl.linkProgram(this.id);
 if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
+// @ts-ignore
 throw new Error('Filter: GL link failed', gl.getProgramInfoLog(this.id));
 }
@@ -149,9 +151,8 @@ const WebGLImageFilter = function (params) {
 };
 const _getTempFramebuffer = function (index) {
-_tempFramebuffers[index] = _tempFramebuffers[index]
-|| _createFramebufferTexture(_width, _height);
+// @ts-ignore
+_tempFramebuffers[index] = _tempFramebuffers[index] || _createFramebufferTexture(_width, _height);
 return _tempFramebuffers[index];
 };
@@ -190,7 +191,8 @@ const WebGLImageFilter = function (params) {
 source = _sourceTexture;
 } else {
 // All following draw calls use the temp buffer last drawn to
-source = _getTempFramebuffer(_currentFramebufferIndex).texture;
+// @ts-ignore
+source = _getTempFramebuffer(_currentFramebufferIndex)?.texture;
 }
 _drawCount++;
@@ -203,7 +205,8 @@
 } else {
 // Intermediate draw call - get a temp buffer to draw to
 _currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
-target = _getTempFramebuffer(_currentFramebufferIndex).fbo;
+// @ts-ignore
+target = _getTempFramebuffer(_currentFramebufferIndex)?.fbo;
 }
 // Bind the source and target and draw the two triangles
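
The filter chain alternates between two lazily created framebuffers so each pass reads the previous pass's output; a condensed sketch of that ping-pong pattern (the createFramebufferTexture helper and dimensions are assumptions):

// sketch: ping-pong framebuffers for multi-pass filtering
let currentIndex = -1;
const tempFramebuffers = [null, null];
function getTempFramebuffer(index) {
  // create each buffer on first use, then reuse it
  tempFramebuffers[index] = tempFramebuffers[index] || createFramebufferTexture(width, height);
  return tempFramebuffers[index];
}
function nextTarget() {
  currentIndex = (currentIndex + 1) % 2; // flip between buffer 0 and buffer 1
  return getTempFramebuffer(currentIndex).fbo;
}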

src/tfjs/backend.js

@@ -20,7 +20,9 @@ export const config = {
 export function register() {
 if (!tf.findBackend(config.name)) {
+// @ts-ignore
 config.canvas = (typeof OffscreenCanvas !== 'undefined') ? new OffscreenCanvas(config.width, config.height) : document.createElement('canvas');
+// @ts-ignore
 const gl = config.canvas.getContext('webgl2', config.webGLattr);
 tf.setWebGLContext(2, gl);
 const ctx = new tf.GPGPUContext(gl);
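
For context, backing a tfjs WebGL backend with a self-managed context generally follows this shape; a minimal sketch (the backend name and canvas size are illustrative, and tf.registerBackend is an assumption about how the snippet above continues):

// sketch: back a tfjs WebGL backend with an explicitly created webgl2 context
const canvas = (typeof OffscreenCanvas !== 'undefined')
  ? new OffscreenCanvas(1024, 1024)
  : document.createElement('canvas');
const gl = canvas.getContext('webgl2');
tf.setWebGLContext(2, gl); // tell tfjs to use this context for WebGL v2
const ctx = new tf.GPGPUContext(gl);
tf.registerBackend('custom-webgl', () => new tf.MathBackendWebGL(ctx));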

tsconfig.json (new file, 25 lines)

@@ -0,0 +1,25 @@
+{
+"compilerOptions": {
+"module": "es2020",
+"target": "es2018",
+"moduleResolution": "node",
+"lib": ["es2018", "dom"],
+"typeRoots": ["node_modules/@types"],
+"outDir": "types",
+"declaration": true,
+"emitDeclarationOnly": true,
+"emitDecoratorMetadata": true,
+"experimentalDecorators": true,
+"importHelpers": true,
+"noImplicitAny": false,
+"preserveConstEnums": true,
+"removeComments": false,
+"resolveJsonModule": true,
+"skipLibCheck": true,
+"sourceMap": false,
+"strictNullChecks": false,
+"allowJs": true
+},
+"formatCodeOptions": { "indentSize": 2, "tabSize": 2 },
+"include": ["src/*", "demo/*"],
+}
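
This new tsconfig runs tsc across the JS sources (allowJs) to emit type declarations, which is why the commit sprinkles // @ts-nocheck and // @ts-ignore through the code: the former silences the checker for an entire file, the latter for the single following line. A minimal sketch of the two directives:

// placing '// @ts-nocheck' as the first line of a file disables checking for that file;
// '// @ts-ignore' suppresses the checker for the single line that follows it
function area(shape) {
  // @ts-ignore
  return shape.width * shape.height;
}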

wiki (submodule, 2 lines changed)

@@ -1 +1 @@
-Subproject commit 699af2235b315ef24766839ddc49a198f7cc21c3
+Subproject commit 43425df86424e9b5f4bec3510f3565963255d4a9