mirror of https://github.com/vladmandic/human

commit 0f35f6bf3e "optimizations"
parent fc1c899bfb

config.js | 85
config.js

@@ -5,20 +5,27 @@ export default {
   backend: 'webgl', // select tfjs backend to use
   console: true, // enable debugging output to console
   async: true, // execute enabled models in parallel
-  // this disables per-model performance data but slightly increases performance
+  // this disables per-model performance data but
+  // slightly increases performance
+  // cannot be used if profiling is enabled
   profile: false, // enable tfjs profiling
-  // this has significant performance impact, only enable for debugging purposes
+  // this has significant performance impact
+  // only enable for debugging purposes
   // currently only implemented for age,gender,emotion models
   deallocate: false, // aggressively deallocate gpu memory after each usage
-  // only valid for webgl backend and only during first call, cannot be changed unless library is reloaded
-  // this has significant performance impact, only enable on low-memory devices
+  // only valid for webgl backend and only during first call
+  // cannot be changed unless library is reloaded
+  // this has significant performance impact
+  // only enable on low-memory devices
   scoped: false, // enable scoped runs
-  // some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
+  // some models *may* have memory leaks,
+  // this wraps everything in a local scope at a cost of performance
   // typically not needed
-  videoOptimized: true, // perform additional optimizations when input is video, must be disabled for images
+  videoOptimized: true, // perform additional optimizations when input is video,
+  // must be disabled for images
   // basically this skips object box boundary detection for every n frames
   // while maintaining in-box detection since objects cannot move that fast

   filter: {
     enabled: true, // enable image pre-processing filters
     width: 0, // resize input width
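Note the interplay documented above: profiling yields nothing useful while models run in parallel, so a debugging session would disable async first. A minimal sketch, assuming the demo's global human instance and the profile data accessor that this commit wires up in src/profile.js:

human.config.async = false;   // per-model timing is unavailable when models run in parallel
human.config.profile = true;  // enables tfjs profiling, with significant overhead
const result = await human.detect(video);
console.log('profile data:', human.profile()); // same accessor the demo code in this diff uses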
@@ -41,50 +48,67 @@ export default {
     polaroid: false, // image polaroid camera effect
     pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)
   },
+
   gesture: {
     enabled: true, // enable simple gesture recognition
   },
+
   face: {
     enabled: true, // controls if specified module is enabled
-    // face.enabled is required for all face models: detector, mesh, iris, age, gender, emotion
+    // face.enabled is required for all face models:
+    // detector, mesh, iris, age, gender, emotion
     // (note: module is not loaded until it is required)
     detector: {
       modelPath: '../models/blazeface-back.json', // can be 'front' or 'back'.
-      // 'front' is optimized for large faces such as front-facing camera and 'back' is optimized for distanct faces.
+      // 'front' is optimized for large faces
+      // such as front-facing camera and
+      // 'back' is optimized for distant faces.
       inputSize: 256, // fixed value: 128 for front and 256 for 'back'
-      maxFaces: 10, // maximum number of faces detected in the input, should be set to the minimum number for performance
-      skipFrames: 15, // how many frames to go without re-running the face bounding box detector, only used for video inputs
-      // if model is running st 25 FPS, we can re-use existing bounding box for updated face mesh analysis
-      // as face probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
+      maxFaces: 10, // maximum number of faces detected in the input
+      // should be set to the minimum number for performance
+      skipFrames: 15, // how many frames to go without re-running the face bounding box detector
+      // only used for video inputs
+      // e.g., if model is running at 25 FPS, we can re-use existing bounding
+      // box for updated face analysis as the head probably hasn't moved much
+      // in short time (10 * 1/25 = 0.4 sec)
       minConfidence: 0.1, // threshold for discarding a prediction
-      iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in non-maximum suppression (0.1 means drop if overlap 10%)
-      scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score in non-maximum suppression, this is applied on detection objects only and before minConfidence
+      iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in
+      // non-maximum suppression (0.1 means drop if overlap 10%)
+      scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score
+      // in non-maximum suppression,
+      // this is applied on detection objects only and before minConfidence
     },
+
     mesh: {
       enabled: true,
       modelPath: '../models/facemesh.json',
       inputSize: 192, // fixed value
     },
+
     iris: {
       enabled: true,
       modelPath: '../models/iris.json',
-      enlargeFactor: 2.3, // empiric tuning
       inputSize: 64, // fixed value
     },
+
     age: {
       enabled: true,
       modelPath: '../models/age-ssrnet-imdb.json', // can be 'age-ssrnet-imdb' or 'age-ssrnet-wiki'
       // which determines training set for model
       inputSize: 64, // fixed value
-      skipFrames: 15, // how many frames to go without re-running the detector, only used for video inputs
+      skipFrames: 15, // how many frames to go without re-running the detector
+      // only used for video inputs
     },
+
     gender: {
       enabled: true,
       minConfidence: 0.1, // threshold for discarding a prediction
       modelPath: '../models/gender-ssrnet-imdb.json', // can be 'gender', 'gender-ssrnet-imdb' or 'gender-ssrnet-wiki'
       inputSize: 64, // fixed value
-      skipFrames: 15, // how many frames to go without re-running the detector, only used for video inputs
+      skipFrames: 15, // how many frames to go without re-running the detector
+      // only used for video inputs
     },
+
     emotion: {
       enabled: true,
       inputSize: 64, // fixed value
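The skipFrames fields above all follow one caching idea: run the expensive box detector rarely and reuse its result in between. A hypothetical sketch of the pattern (runBoundingBoxDetector and the counter are illustrative, not library code):

let frameCount = 0;
let cachedBox = null;
function getFaceBox(input, skipFrames) {
  // re-run detection only every (skipFrames + 1) frames; in between, reuse the
  // cached box, since a face cannot move far between consecutive video frames
  if (!cachedBox || frameCount % (skipFrames + 1) === 0) cachedBox = runBoundingBoxDetector(input);
  frameCount += 1;
  return cachedBox; // in-box analysis (mesh, age, etc.) still runs every frame
}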
@@ -93,26 +117,33 @@ export default {
       modelPath: '../models/emotion-large.json', // can be 'mini', 'large'
     },
   },
+
   body: {
     enabled: true,
     modelPath: '../models/posenet.json',
     inputResolution: 257, // fixed value
-    outputStride: 16, // fixed value
-    maxDetections: 10, // maximum number of people detected in the input, should be set to the minimum number for performance
-    scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
+    maxDetections: 10, // maximum number of people detected in the input
+    // should be set to the minimum number for performance
+    scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score
+    // in non-maximum suppression
     nmsRadius: 20, // radius for deciding points are too close in non-maximum suppression
   },
+
   hand: {
     enabled: true,
     inputSize: 256, // fixed value
-    skipFrames: 15, // how many frames to go without re-running the hand bounding box detector, only used for video inputs
-    // if model is running st 25 FPS, we can re-use existing bounding box for updated hand skeleton analysis
-    // as the hand probably hasn't moved much in short time (10 * 1/25 = 0.25 sec)
+    skipFrames: 15, // how many frames to go without re-running the hand bounding box detector
+    // only used for video inputs
+    // e.g., if model is running at 25 FPS, we can re-use existing bounding
+    // box for updated hand skeleton analysis as the hand probably
+    // hasn't moved much in short time (10 * 1/25 = 0.4 sec)
     minConfidence: 0.5, // threshold for discarding a prediction
-    iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much in non-maximum suppression
-    scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on score in non-maximum suppression
-    enlargeFactor: 1.65, // empiric tuning as skeleton prediction prefers hand box with some whitespace
-    maxHands: 1, // maximum number of hands detected in the input, should be set to the minimum number for performance
+    iouThreshold: 0.1, // threshold for deciding whether boxes overlap too much
+    // in non-maximum suppression
+    scoreThreshold: 0.8, // threshold for deciding when to remove boxes based on
+    // score in non-maximum suppression
+    maxHands: 1, // maximum number of hands detected in the input
+    // should be set to the minimum number for performance
     landmarks: true, // detect hand landmarks or just hand boundary box
     detector: {
       modelPath: '../models/handdetect.json',
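Because videoOptimized enables the frame-skipping heuristics above, still images need it turned off before detection. A sketch under that assumption (the import path and export shape mirror the demo layout and are themselves assumptions):

import human from '../dist/human.esm.js'; // path is an assumption
human.config.videoOptimized = false; // a single image has no previous frame to reuse
const result = await human.detect(imageElement);
console.log(result.face, result.body, result.hand, result.gesture);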
demo/browser.js

@@ -12,6 +12,7 @@ const ui = {
   baseFontProto: 'small-caps {size} "Segoe UI"',
   baseLineWidth: 12,
   baseLineHeightProto: 2,
+  crop: true,
   columns: 2,
   busy: false,
   facing: true,
@@ -21,7 +22,7 @@ const ui = {
   drawBoxes: true,
   drawPoints: false,
   drawPolygons: true,
-  fillPolygons: true,
+  fillPolygons: false,
   useDepth: true,
   console: true,
   maxFrames: 10,
@@ -132,7 +133,7 @@ async function setupCamera() {
     audio: false,
     video: {
       facingMode: (ui.facing ? 'user' : 'environment'),
-      resizeMode: 'none',
+      resizeMode: ui.crop ? 'crop-and-scale' : 'none',
       width: { ideal: window.innerWidth },
       height: { ideal: window.innerHeight },
     },
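Assembled, the constraints object produced by the new code ties resizeMode to the new ui.crop flag; a sketch of the full call:

const stream = await navigator.mediaDevices.getUserMedia({
  audio: false,
  video: {
    facingMode: (ui.facing ? 'user' : 'environment'),
    resizeMode: ui.crop ? 'crop-and-scale' : 'none', // user-selectable crop behavior
    width: { ideal: window.innerWidth },
    height: { ideal: window.innerHeight },
  },
});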
@@ -206,7 +207,8 @@ function runHumanDetect(input, canvas) {
   const live = input.srcObject && (input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState > 2) && (!input.paused);
   if (!live && input.srcObject) {
     // if we want to continue and camera not ready, retry in 0.5sec, else just give up
-    if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
+    if (input.paused) log('camera paused');
+    else if ((input.srcObject.getVideoTracks()[0].readyState === 'live') && (input.readyState <= 2)) setTimeout(() => runHumanDetect(input, canvas), 500);
     else log(`camera not ready: track state: ${input.srcObject?.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
     return;
   }
@@ -223,7 +225,6 @@ function runHumanDetect(input, canvas) {
     human.detect(input).then((result) => {
       if (result.error) log(result.error);
       else drawResults(input, result, canvas);
-      if (human.config.profile) log('profile data:', human.profile());
     });
   }
 }
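For reference, the readyState comparisons above use the standard HTMLMediaElement values:

// 0 HAVE_NOTHING, 1 HAVE_METADATA, 2 HAVE_CURRENT_DATA,
// 3 HAVE_FUTURE_DATA, 4 HAVE_ENOUGH_DATA
// `input.readyState > 2` means the video can actually advance;
// `<= 2` with a live track triggers the 0.5 sec retry instead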
@@ -300,48 +301,48 @@ function setupMenu() {
   document.getElementById('play').addEventListener('click', () => btn.click());

   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu.addList('Backend', ['cpu', 'webgl', 'wasm', 'webgpu'], human.config.backend, (val) => human.config.backend = val);
-  menu.addBool('Async Operations', human.config, 'async', (val) => human.config.async = val);
-  menu.addBool('Enable Profiler', human.config, 'profile', (val) => human.config.profile = val);
-  menu.addBool('Memory Shield', human.config, 'deallocate', (val) => human.config.deallocate = val);
-  menu.addBool('Use Web Worker', ui, 'useWorker');
+  menu.addList('backend', ['cpu', 'webgl', 'wasm', 'webgpu'], human.config.backend, (val) => human.config.backend = val);
+  menu.addBool('async operations', human.config, 'async', (val) => human.config.async = val);
+  menu.addBool('enable profiler', human.config, 'profile', (val) => human.config.profile = val);
+  menu.addBool('memory shield', human.config, 'deallocate', (val) => human.config.deallocate = val);
+  menu.addBool('use web worker', ui, 'useWorker');
   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu.addLabel('Enabled Models');
-  menu.addBool('Face Detect', human.config.face, 'enabled');
-  menu.addBool('Face Mesh', human.config.face.mesh, 'enabled');
-  menu.addBool('Face Iris', human.config.face.iris, 'enabled');
-  menu.addBool('Face Age', human.config.face.age, 'enabled');
-  menu.addBool('Face Gender', human.config.face.gender, 'enabled');
-  menu.addBool('Face Emotion', human.config.face.emotion, 'enabled');
-  menu.addBool('Body Pose', human.config.body, 'enabled');
-  menu.addBool('Hand Pose', human.config.hand, 'enabled');
-  menu.addBool('Gesture Analysis', human.config.gesture, 'enabled');
+  menu.addLabel('enabled models');
+  menu.addBool('face detect', human.config.face, 'enabled');
+  menu.addBool('face mesh', human.config.face.mesh, 'enabled');
+  menu.addBool('face iris', human.config.face.iris, 'enabled');
+  menu.addBool('face age', human.config.face.age, 'enabled');
+  menu.addBool('face gender', human.config.face.gender, 'enabled');
+  menu.addBool('face emotion', human.config.face.emotion, 'enabled');
+  menu.addBool('body pose', human.config.body, 'enabled');
+  menu.addBool('hand pose', human.config.hand, 'enabled');
+  menu.addBool('gesture analysis', human.config.gesture, 'enabled');

   menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu.addLabel('Model Parameters');
-  menu.addRange('Max Objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
+  menu.addLabel('model parameters');
+  menu.addRange('max objects', human.config.face.detector, 'maxFaces', 1, 50, 1, (val) => {
     human.config.face.detector.maxFaces = parseInt(val);
     human.config.body.maxDetections = parseInt(val);
     human.config.hand.maxHands = parseInt(val);
   });
-  menu.addRange('Skip Frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
+  menu.addRange('skip frames', human.config.face.detector, 'skipFrames', 0, 50, 1, (val) => {
     human.config.face.detector.skipFrames = parseInt(val);
     human.config.face.emotion.skipFrames = parseInt(val);
     human.config.face.age.skipFrames = parseInt(val);
     human.config.hand.skipFrames = parseInt(val);
   });
-  menu.addRange('Min Confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
+  menu.addRange('min confidence', human.config.face.detector, 'minConfidence', 0.0, 1.0, 0.05, (val) => {
     human.config.face.detector.minConfidence = parseFloat(val);
     human.config.face.gender.minConfidence = parseFloat(val);
     human.config.face.emotion.minConfidence = parseFloat(val);
     human.config.hand.minConfidence = parseFloat(val);
   });
-  menu.addRange('Score Threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
+  menu.addRange('score threshold', human.config.face.detector, 'scoreThreshold', 0.1, 1.0, 0.05, (val) => {
     human.config.face.detector.scoreThreshold = parseFloat(val);
     human.config.hand.scoreThreshold = parseFloat(val);
     human.config.body.scoreThreshold = parseFloat(val);
   });
-  menu.addRange('IOU Threshold', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
+  menu.addRange('overlap', human.config.face.detector, 'iouThreshold', 0.1, 1.0, 0.05, (val) => {
     human.config.face.detector.iouThreshold = parseFloat(val);
     human.config.hand.iouThreshold = parseFloat(val);
   });
@@ -350,31 +351,32 @@ function setupMenu() {
   menu.addChart('FPS', 'FPS');

   menuFX = new Menu(document.body, '...', { top: '1rem', right: '18rem' });
-  menuFX.addLabel('UI Options');
-  menuFX.addBool('Camera Front/Back', ui, 'facing', () => setupCamera());
-  menuFX.addBool('Use 3D Depth', ui, 'useDepth');
-  menuFX.addBool('Draw Boxes', ui, 'drawBoxes');
-  menuFX.addBool('Draw Points', ui, 'drawPoints');
-  menuFX.addBool('Draw Polygons', ui, 'drawPolygons');
-  menuFX.addBool('Fill Polygons', ui, 'fillPolygons');
+  menuFX.addLabel('ui options');
+  menuFX.addBool('crop & scale', ui, 'crop', () => setupCamera());
+  menuFX.addBool('camera front/back', ui, 'facing', () => setupCamera());
+  menuFX.addBool('use 3D depth', ui, 'useDepth');
+  menuFX.addBool('draw boxes', ui, 'drawBoxes');
+  menuFX.addBool('draw polygons', ui, 'drawPolygons');
+  menuFX.addBool('draw points', ui, 'drawPoints');
   menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menuFX.addLabel('Image Processing');
-  menuFX.addBool('Enabled', human.config.filter, 'enabled');
-  ui.menuWidth = menuFX.addRange('Image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
-  ui.menuHeight = menuFX.addRange('Image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
-  menuFX.addRange('Brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
-  menuFX.addRange('Contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
-  menuFX.addRange('Sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
-  menuFX.addRange('Blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
-  menuFX.addRange('Saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
-  menuFX.addRange('Hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
-  menuFX.addRange('Pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
-  menuFX.addBool('Negative', human.config.filter, 'negative');
-  menuFX.addBool('Sepia', human.config.filter, 'sepia');
-  menuFX.addBool('Vintage', human.config.filter, 'vintage');
-  menuFX.addBool('Kodachrome', human.config.filter, 'kodachrome');
-  menuFX.addBool('Technicolor', human.config.filter, 'technicolor');
-  menuFX.addBool('Polaroid', human.config.filter, 'polaroid');
+  menuFX.addLabel('image processing');
+  menuFX.addBool('enabled', human.config.filter, 'enabled');
+  ui.menuWidth = menuFX.addRange('image width', human.config.filter, 'width', 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
+  ui.menuHeight = menuFX.addRange('image height', human.config.filter, 'height', 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
+  menuFX.addRange('brightness', human.config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
+  menuFX.addRange('contrast', human.config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
+  menuFX.addRange('sharpness', human.config.filter, 'sharpness', 0, 1.0, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
+  menuFX.addRange('blur', human.config.filter, 'blur', 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
+  menuFX.addRange('saturation', human.config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
+  menuFX.addRange('hue', human.config.filter, 'hue', 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
+  menuFX.addRange('pixelate', human.config.filter, 'pixelate', 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
+  menuFX.addBool('negative', human.config.filter, 'negative');
+  menuFX.addBool('sepia', human.config.filter, 'sepia');
+  menuFX.addBool('vintage', human.config.filter, 'vintage');
+  menuFX.addBool('kodachrome', human.config.filter, 'kodachrome');
+  menuFX.addBool('technicolor', human.config.filter, 'technicolor');
+  menuFX.addBool('polaroid', human.config.filter, 'polaroid');
 }

 async function main() {
demo/menu.js

@@ -29,7 +29,7 @@ function createCSS() {
   .menu-item { display: flex; white-space: nowrap; padding: 0.2rem; width: max-content; cursor: default; }
   .menu-title { text-align: right; cursor: pointer; }
   .menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
-  .menu-label { padding: 0; }
+  .menu-label { padding: 0; font-weight: 800; }

   .menu-list { margin-right: 0.8rem; }
   select:focus { outline: none; }
dev-server script

@@ -24,8 +24,6 @@ const log = require('@vladmandic/pilogger');
 // openssl req -x509 -newkey rsa:4096 -nodes -keyout dev-server.key -out dev-server.crt -days 365 -subj "/C=US/ST=Florida/L=Miami/O=@vladmandic"
 // client app does not work without secure server since browsers enforce https for webcam access
 const options = {
-  // key: fs.readFileSync('/home/vlado/dev/piproxy/cert/private.pem'),
-  // cert: fs.readFileSync('/home/vlado/dev/piproxy/cert/fullchain.pem'),
   key: fs.readFileSync('dev-server/dev-server.key'),
   cert: fs.readFileSync('dev-server/dev-server.crt'),
   root: '..',
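Browsers expose webcam capture only on secure origins, hence the self-signed certificate above. A sketch of how such options are typically consumed (the handler and port are assumptions, since the server body is outside this hunk):

const https = require('https');
const server = https.createServer({ key: options.key, cert: options.cert }, (req, res) => {
  res.end('ok'); // placeholder handler; the real server serves the demo files
});
server.listen(8000); // port is an assumption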
dist/demo-browser-index.js

@@ -67113,7 +67113,7 @@ var require_blazeface = __commonJS((exports) => {
       }
       const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
       const logits = tf2.slice(prediction, [0, 0], [-1, 1]);
-      const scoresOut = logits.squeeze();
+      const scoresOut = tf2.sigmoid(logits).squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
     const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
@@ -67454,7 +67454,7 @@ var require_facepipeline = __commonJS((exports) => {
     this.meshWidth = config.mesh.inputSize;
     this.meshHeight = config.mesh.inputSize;
     this.irisSize = config.iris.inputSize;
-    this.irisEnlarge = config.iris.enlargeFactor;
+    this.irisEnlarge = 2.3;
   }
   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
     const boxSize = bounding.getBoxSize({startPoint: box.startPoint, endPoint: box.endPoint});
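The blazeface change above matters because detector.scoreThreshold is specified in the [0, 1] range: tf.sigmoid maps raw logits onto that range before non-maximum suppression compares them. A quick sketch of the effect:

// sigmoid maps (-inf, inf) onto (0, 1):
const logits = tf.tensor1d([-2, 0, 3]);
tf.sigmoid(logits).print(); // approximately [0.119, 0.5, 0.953]
// comparing raw logits against scoreThreshold: 0.2 instead would silently
// change which candidate boxes survive non-maximum suppression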
@@ -70878,9 +70878,9 @@ var require_profile = __commonJS((exports) => {
     largest.length = maxResults;
     const res = {newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest};
     profileData[name] = res;
-    console.log("Human profiler", name, res);
   }
   exports.run = profile2;
+  exports.data = profileData;
 });
 var require_age = __commonJS((exports) => {
   const tf2 = require_tf_node();
@@ -71070,7 +71070,7 @@ var require_emotion = __commonJS((exports) => {
       data = emotionT.dataSync();
       tf2.dispose(emotionT);
     } else {
-      const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
+      const profileData = await tf2.profile(() => models.emotion.predict(normalize));
       data = profileData.result.dataSync();
       profileData.result.dispose();
       profile2.run("emotion", profileData);
@@ -71544,25 +71544,25 @@ var require_modelPoseNet = __commonJS((exports) => {
   class PoseNet {
     constructor(net) {
       this.baseModel = net;
+      this.outputStride = 16;
     }
     async estimatePoses(input, config) {
       return new Promise(async (resolve) => {
-        const outputStride = config.outputStride;
         const height = input.shape[1];
         const width = input.shape[2];
-        const resized = util.resizeTo(input, [config.inputResolution, config.inputResolution]);
-        const {heatmapScores, offsets, displacementFwd, displacementBwd} = this.baseModel.predict(resized);
-        const allTensorBuffers = await util.toTensorBuffers3D([heatmapScores, offsets, displacementFwd, displacementBwd]);
+        const resized = util.resizeTo(input, [config.body.inputResolution, config.body.inputResolution]);
+        const res = this.baseModel.predict(resized);
+        const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);
         const scoresBuffer = allTensorBuffers[0];
         const offsetsBuffer = allTensorBuffers[1];
         const displacementsFwdBuffer = allTensorBuffers[2];
         const displacementsBwdBuffer = allTensorBuffers[3];
-        const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, config.maxDetections, config.scoreThreshold, config.nmsRadius);
-        const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.inputResolution, config.inputResolution]);
-        heatmapScores.dispose();
-        offsets.dispose();
-        displacementFwd.dispose();
-        displacementBwd.dispose();
+        const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);
+        const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputResolution, config.body.inputResolution]);
+        res.heatmapScores.dispose();
+        res.offsets.dispose();
+        res.displacementFwd.dispose();
+        res.displacementBwd.dispose();
         resized.dispose();
         resolve(resultPoses);
       });
@@ -71572,14 +71572,11 @@ var require_modelPoseNet = __commonJS((exports) => {
     }
   }
   exports.PoseNet = PoseNet;
-  async function loadMobileNet(config) {
-    const graphModel = await tf2.loadGraphModel(config.modelPath);
-    const mobilenet = new modelMobileNet.MobileNet(graphModel, config.outputStride);
-    console.log(`Human: load model: ${config.modelPath.match(/\/(.*)\./)[1]}`);
-    return new PoseNet(mobilenet);
-  }
   async function load(config) {
-    return loadMobileNet(config);
+    const graphModel = await tf2.loadGraphModel(config.body.modelPath);
+    const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
+    console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+    return new PoseNet(mobilenet);
   }
   exports.load = load;
 });
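The posenet refactor above changes the module contract: callers now pass the whole config object and the module scopes itself to config.body, with outputStride fixed at 16 on the instance. The call-shape change, as a sketch:

// before: caller pre-scoped the config
let model = await posenet.load(config.body);
let poses = await model.estimatePoses(tensor, config.body);
// after: the module reads config.body.* internally
model = await posenet.load(config);
poses = await model.estimatePoses(tensor, config);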
@@ -90691,7 +90688,6 @@ var require_config = __commonJS((exports) => {
     iris: {
       enabled: true,
       modelPath: "../models/iris.json",
-      enlargeFactor: 2.3,
       inputSize: 64
     },
     age: {
@@ -90719,7 +90715,6 @@ var require_config = __commonJS((exports) => {
     enabled: true,
     modelPath: "../models/posenet.json",
     inputResolution: 257,
-    outputStride: 16,
     maxDetections: 10,
     scoreThreshold: 0.8,
     nmsRadius: 20
@@ -90731,7 +90726,6 @@ var require_config = __commonJS((exports) => {
     minConfidence: 0.5,
     iouThreshold: 0.1,
     scoreThreshold: 0.8,
-    enlargeFactor: 1.65,
     maxHands: 1,
     landmarks: true,
     detector: {
@@ -90937,14 +90931,14 @@ class Human {
         this.models.gender || gender.load(this.config),
         this.models.emotion || emotion.load(this.config),
         this.models.facemesh || facemesh.load(this.config.face),
-        this.models.posenet || posenet.load(this.config.body),
+        this.models.posenet || posenet.load(this.config),
         this.models.handpose || handpose.load(this.config.hand)
       ]);
     } else {
       if (this.config.face.enabled && !this.models.facemesh)
         this.models.facemesh = await facemesh.load(this.config.face);
       if (this.config.body.enabled && !this.models.posenet)
-        this.models.posenet = await posenet.load(this.config.body);
+        this.models.posenet = await posenet.load(this.config);
       if (this.config.hand.enabled && !this.models.handpose)
         this.models.handpose = await handpose.load(this.config.hand);
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age)
@@ -91090,13 +91084,13 @@ class Human {
     }
     this.analyze("Start Body:");
     if (this.config.async) {
-      poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process2.tensor, this.config.body) : [];
+      poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process2.tensor, this.config) : [];
       if (this.perf.body)
         delete this.perf.body;
     } else {
      this.state = "run:body";
       timeStamp2 = now();
-      poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process2.tensor, this.config.body) : [];
+      poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process2.tensor, this.config) : [];
       this.perf.body = Math.trunc(now() - timeStamp2);
     }
     this.analyze("End Body:");
@@ -91362,7 +91356,7 @@ function createCSS() {
   .menu-item { display: flex; white-space: nowrap; padding: 0.2rem; width: max-content; cursor: default; }
   .menu-title { text-align: right; cursor: pointer; }
   .menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
-  .menu-label { padding: 0; }
+  .menu-label { padding: 0; font-weight: 800; }

   .menu-list { margin-right: 0.8rem; }
   select:focus { outline: none; }
@@ -91641,6 +91635,7 @@ const ui = {
   baseFontProto: 'small-caps {size} "Segoe UI"',
   baseLineWidth: 12,
   baseLineHeightProto: 2,
+  crop: true,
   columns: 2,
   busy: false,
   facing: true,
@@ -91650,7 +91645,7 @@ const ui = {
   drawBoxes: true,
   drawPoints: false,
   drawPolygons: true,
-  fillPolygons: true,
+  fillPolygons: false,
   useDepth: true,
   console: true,
   maxFrames: 10,
@@ -91746,7 +91741,7 @@ ${msg}`;
     audio: false,
     video: {
       facingMode: ui.facing ? "user" : "environment",
-      resizeMode: "none",
+      resizeMode: ui.crop ? "crop-and-scale" : "none",
       width: {ideal: window.innerWidth},
       height: {ideal: window.innerHeight}
     }
@@ -91814,7 +91809,9 @@ function runHumanDetect(input, canvas) {
   timeStamp = performance.now();
   const live = input.srcObject && input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState > 2 && !input.paused;
   if (!live && input.srcObject) {
-    if (input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState <= 2)
+    if (input.paused)
+      log("camera paused");
+    else if (input.srcObject.getVideoTracks()[0].readyState === "live" && input.readyState <= 2)
       setTimeout(() => runHumanDetect(input, canvas), 500);
     else
       log(`camera not ready: track state: ${(_a = input.srcObject) == null ? void 0 : _a.getVideoTracks()[0].readyState} stream state: ${input.readyState}`);
@@ -91833,8 +91830,6 @@ function runHumanDetect(input, canvas) {
         log(result.error);
       else
         drawResults(input, result, canvas);
-      if (human.config.profile)
-        log("profile data:", human.profile());
     });
   }
 }
@@ -91904,78 +91899,79 @@ function setupMenu() {
   menu2.addButton("process images", "process images", () => detectSampleImages());
   document.getElementById("play").addEventListener("click", () => btn.click());
   menu2.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu2.addList("Backend", ["cpu", "webgl", "wasm", "webgpu"], human.config.backend, (val) => human.config.backend = val);
-  menu2.addBool("Async Operations", human.config, "async", (val) => human.config.async = val);
-  menu2.addBool("Enable Profiler", human.config, "profile", (val) => human.config.profile = val);
-  menu2.addBool("Memory Shield", human.config, "deallocate", (val) => human.config.deallocate = val);
-  menu2.addBool("Use Web Worker", ui, "useWorker");
+  menu2.addList("backend", ["cpu", "webgl", "wasm", "webgpu"], human.config.backend, (val) => human.config.backend = val);
+  menu2.addBool("async operations", human.config, "async", (val) => human.config.async = val);
+  menu2.addBool("enable profiler", human.config, "profile", (val) => human.config.profile = val);
+  menu2.addBool("memory shield", human.config, "deallocate", (val) => human.config.deallocate = val);
+  menu2.addBool("use web worker", ui, "useWorker");
   menu2.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu2.addLabel("Enabled Models");
-  menu2.addBool("Face Detect", human.config.face, "enabled");
-  menu2.addBool("Face Mesh", human.config.face.mesh, "enabled");
-  menu2.addBool("Face Iris", human.config.face.iris, "enabled");
-  menu2.addBool("Face Age", human.config.face.age, "enabled");
-  menu2.addBool("Face Gender", human.config.face.gender, "enabled");
-  menu2.addBool("Face Emotion", human.config.face.emotion, "enabled");
-  menu2.addBool("Body Pose", human.config.body, "enabled");
-  menu2.addBool("Hand Pose", human.config.hand, "enabled");
-  menu2.addBool("Gesture Analysis", human.config.gesture, "enabled");
+  menu2.addLabel("enabled models");
+  menu2.addBool("face detect", human.config.face, "enabled");
+  menu2.addBool("face mesh", human.config.face.mesh, "enabled");
+  menu2.addBool("face iris", human.config.face.iris, "enabled");
+  menu2.addBool("face age", human.config.face.age, "enabled");
+  menu2.addBool("face gender", human.config.face.gender, "enabled");
+  menu2.addBool("face emotion", human.config.face.emotion, "enabled");
+  menu2.addBool("body pose", human.config.body, "enabled");
+  menu2.addBool("hand pose", human.config.hand, "enabled");
+  menu2.addBool("gesture analysis", human.config.gesture, "enabled");
   menu2.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menu2.addLabel("Model Parameters");
-  menu2.addRange("Max Objects", human.config.face.detector, "maxFaces", 1, 50, 1, (val) => {
+  menu2.addLabel("model parameters");
+  menu2.addRange("max objects", human.config.face.detector, "maxFaces", 1, 50, 1, (val) => {
     human.config.face.detector.maxFaces = parseInt(val);
     human.config.body.maxDetections = parseInt(val);
     human.config.hand.maxHands = parseInt(val);
   });
-  menu2.addRange("Skip Frames", human.config.face.detector, "skipFrames", 0, 50, 1, (val) => {
+  menu2.addRange("skip frames", human.config.face.detector, "skipFrames", 0, 50, 1, (val) => {
     human.config.face.detector.skipFrames = parseInt(val);
     human.config.face.emotion.skipFrames = parseInt(val);
     human.config.face.age.skipFrames = parseInt(val);
     human.config.hand.skipFrames = parseInt(val);
   });
-  menu2.addRange("Min Confidence", human.config.face.detector, "minConfidence", 0, 1, 0.05, (val) => {
+  menu2.addRange("min confidence", human.config.face.detector, "minConfidence", 0, 1, 0.05, (val) => {
     human.config.face.detector.minConfidence = parseFloat(val);
     human.config.face.gender.minConfidence = parseFloat(val);
     human.config.face.emotion.minConfidence = parseFloat(val);
     human.config.hand.minConfidence = parseFloat(val);
   });
-  menu2.addRange("Score Threshold", human.config.face.detector, "scoreThreshold", 0.1, 1, 0.05, (val) => {
+  menu2.addRange("score threshold", human.config.face.detector, "scoreThreshold", 0.1, 1, 0.05, (val) => {
     human.config.face.detector.scoreThreshold = parseFloat(val);
     human.config.hand.scoreThreshold = parseFloat(val);
     human.config.body.scoreThreshold = parseFloat(val);
   });
-  menu2.addRange("IOU Threshold", human.config.face.detector, "iouThreshold", 0.1, 1, 0.05, (val) => {
+  menu2.addRange("overlap", human.config.face.detector, "iouThreshold", 0.1, 1, 0.05, (val) => {
     human.config.face.detector.iouThreshold = parseFloat(val);
     human.config.hand.iouThreshold = parseFloat(val);
   });
   menu2.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
   menu2.addChart("FPS", "FPS");
   menuFX = new menu_default(document.body, "...", {top: "1rem", right: "18rem"});
-  menuFX.addLabel("UI Options");
-  menuFX.addBool("Camera Front/Back", ui, "facing", () => setupCamera());
-  menuFX.addBool("Use 3D Depth", ui, "useDepth");
-  menuFX.addBool("Draw Boxes", ui, "drawBoxes");
-  menuFX.addBool("Draw Points", ui, "drawPoints");
-  menuFX.addBool("Draw Polygons", ui, "drawPolygons");
-  menuFX.addBool("Fill Polygons", ui, "fillPolygons");
+  menuFX.addLabel("ui options");
+  menuFX.addBool("crop & scale", ui, "crop", () => setupCamera());
+  menuFX.addBool("camera front/back", ui, "facing", () => setupCamera());
+  menuFX.addBool("use 3D depth", ui, "useDepth");
+  menuFX.addBool("draw boxes", ui, "drawBoxes");
+  menuFX.addBool("draw polygons", ui, "drawPolygons");
+  menuFX.addBool("draw points", ui, "drawPoints");
   menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-  menuFX.addLabel("Image Processing");
-  menuFX.addBool("Enabled", human.config.filter, "enabled");
-  ui.menuWidth = menuFX.addRange("Image width", human.config.filter, "width", 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
-  ui.menuHeight = menuFX.addRange("Image height", human.config.filter, "height", 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
-  menuFX.addRange("Brightness", human.config.filter, "brightness", -1, 1, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
-  menuFX.addRange("Contrast", human.config.filter, "contrast", -1, 1, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
-  menuFX.addRange("Sharpness", human.config.filter, "sharpness", 0, 1, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
-  menuFX.addRange("Blur", human.config.filter, "blur", 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
-  menuFX.addRange("Saturation", human.config.filter, "saturation", -1, 1, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
-  menuFX.addRange("Hue", human.config.filter, "hue", 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
-  menuFX.addRange("Pixelate", human.config.filter, "pixelate", 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
-  menuFX.addBool("Negative", human.config.filter, "negative");
-  menuFX.addBool("Sepia", human.config.filter, "sepia");
-  menuFX.addBool("Vintage", human.config.filter, "vintage");
-  menuFX.addBool("Kodachrome", human.config.filter, "kodachrome");
-  menuFX.addBool("Technicolor", human.config.filter, "technicolor");
-  menuFX.addBool("Polaroid", human.config.filter, "polaroid");
+  menuFX.addLabel("image processing");
+  menuFX.addBool("enabled", human.config.filter, "enabled");
+  ui.menuWidth = menuFX.addRange("image width", human.config.filter, "width", 0, 3840, 10, (val) => human.config.filter.width = parseInt(val));
+  ui.menuHeight = menuFX.addRange("image height", human.config.filter, "height", 0, 2160, 10, (val) => human.config.filter.height = parseInt(val));
+  menuFX.addRange("brightness", human.config.filter, "brightness", -1, 1, 0.05, (val) => human.config.filter.brightness = parseFloat(val));
+  menuFX.addRange("contrast", human.config.filter, "contrast", -1, 1, 0.05, (val) => human.config.filter.contrast = parseFloat(val));
+  menuFX.addRange("sharpness", human.config.filter, "sharpness", 0, 1, 0.05, (val) => human.config.filter.sharpness = parseFloat(val));
+  menuFX.addRange("blur", human.config.filter, "blur", 0, 20, 1, (val) => human.config.filter.blur = parseInt(val));
+  menuFX.addRange("saturation", human.config.filter, "saturation", -1, 1, 0.05, (val) => human.config.filter.saturation = parseFloat(val));
+  menuFX.addRange("hue", human.config.filter, "hue", 0, 360, 5, (val) => human.config.filter.hue = parseInt(val));
+  menuFX.addRange("pixelate", human.config.filter, "pixelate", 0, 32, 1, (val) => human.config.filter.pixelate = parseInt(val));
+  menuFX.addBool("negative", human.config.filter, "negative");
+  menuFX.addBool("sepia", human.config.filter, "sepia");
+  menuFX.addBool("vintage", human.config.filter, "vintage");
+  menuFX.addBool("kodachrome", human.config.filter, "kodachrome");
+  menuFX.addBool("technicolor", human.config.filter, "technicolor");
+  menuFX.addBool("polaroid", human.config.filter, "polaroid");
 }
 async function main() {
   log("Human: demo starting ...");
File diff suppressed because one or more lines are too long

dist/demo-browser-index.json

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "demo/browser.js": {
-      "bytes": 18278,
+      "bytes": 18361,
       "imports": [
         {
           "path": "dist/human.esm.js"
@@ -19,11 +19,11 @@
       "imports": []
     },
     "demo/menu.js": {
-      "bytes": 12707,
+      "bytes": 12725,
       "imports": []
     },
     "dist/human.esm.js": {
-      "bytes": 3196462,
+      "bytes": 3196296,
       "imports": []
     }
   },
@@ -31,25 +31,25 @@
   "dist/demo-browser-index.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5560779
+    "bytes": 5559698
   },
   "dist/demo-browser-index.js": {
     "imports": [],
     "inputs": {
       "dist/human.esm.js": {
-        "bytesInOutput": 3194325
+        "bytesInOutput": 3194159
       },
       "demo/draw.js": {
         "bytesInOutput": 7453
       },
       "demo/menu.js": {
-        "bytesInOutput": 12709
+        "bytesInOutput": 12727
       },
       "demo/browser.js": {
-        "bytesInOutput": 16220
+        "bytesInOutput": 16301
       }
     },
-    "bytes": 3230829
+    "bytes": 3230762
   }
 }
dist/human.esm.js

@@ -67152,7 +67152,7 @@ var require_blazeface = __commonJS((exports) => {
       }
       const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
       const logits = tf2.slice(prediction, [0, 0], [-1, 1]);
-      const scoresOut = logits.squeeze();
+      const scoresOut = tf2.sigmoid(logits).squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
     const boxIndicesTensor = await tf2.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
@@ -67501,7 +67501,7 @@ var require_facepipeline = __commonJS((exports) => {
     this.meshWidth = config.mesh.inputSize;
     this.meshHeight = config.mesh.inputSize;
     this.irisSize = config.iris.inputSize;
-    this.irisEnlarge = config.iris.enlargeFactor;
+    this.irisEnlarge = 2.3;
   }
   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
     const boxSize = bounding.getBoxSize({startPoint: box.startPoint, endPoint: box.endPoint});
@@ -70933,9 +70933,9 @@ var require_profile = __commonJS((exports) => {
     largest.length = maxResults;
     const res = {newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest};
     profileData[name] = res;
-    console.log("Human profiler", name, res);
   }
   exports.run = profile2;
+  exports.data = profileData;
 });

 // src/age/age.js
@@ -71131,7 +71131,7 @@ var require_emotion = __commonJS((exports) => {
       data = emotionT.dataSync();
       tf2.dispose(emotionT);
     } else {
-      const profileData = await tf2.profile(() => models.emotion.predict(grayscale));
+      const profileData = await tf2.profile(() => models.emotion.predict(normalize));
       data = profileData.result.dataSync();
       profileData.result.dispose();
       profile2.run("emotion", profileData);
@@ -71625,25 +71625,25 @@ var require_modelPoseNet = __commonJS((exports) => {
   class PoseNet {
     constructor(net) {
       this.baseModel = net;
+      this.outputStride = 16;
     }
     async estimatePoses(input, config) {
       return new Promise(async (resolve) => {
-        const outputStride = config.outputStride;
         const height = input.shape[1];
         const width = input.shape[2];
-        const resized = util.resizeTo(input, [config.inputResolution, config.inputResolution]);
-        const {heatmapScores, offsets, displacementFwd, displacementBwd} = this.baseModel.predict(resized);
-        const allTensorBuffers = await util.toTensorBuffers3D([heatmapScores, offsets, displacementFwd, displacementBwd]);
+        const resized = util.resizeTo(input, [config.body.inputResolution, config.body.inputResolution]);
+        const res = this.baseModel.predict(resized);
+        const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);
         const scoresBuffer = allTensorBuffers[0];
         const offsetsBuffer = allTensorBuffers[1];
         const displacementsFwdBuffer = allTensorBuffers[2];
         const displacementsBwdBuffer = allTensorBuffers[3];
-        const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, config.maxDetections, config.scoreThreshold, config.nmsRadius);
-        const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.inputResolution, config.inputResolution]);
-        heatmapScores.dispose();
-        offsets.dispose();
-        displacementFwd.dispose();
-        displacementBwd.dispose();
+        const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);
+        const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputResolution, config.body.inputResolution]);
+        res.heatmapScores.dispose();
+        res.offsets.dispose();
+        res.displacementFwd.dispose();
+        res.displacementBwd.dispose();
         resized.dispose();
         resolve(resultPoses);
       });
@@ -71653,14 +71653,11 @@ var require_modelPoseNet = __commonJS((exports) => {
     }
   }
   exports.PoseNet = PoseNet;
-  async function loadMobileNet(config) {
-    const graphModel = await tf2.loadGraphModel(config.modelPath);
-    const mobilenet = new modelMobileNet.MobileNet(graphModel, config.outputStride);
-    console.log(`Human: load model: ${config.modelPath.match(/\/(.*)\./)[1]}`);
-    return new PoseNet(mobilenet);
-  }
   async function load(config) {
-    return loadMobileNet(config);
+    const graphModel = await tf2.loadGraphModel(config.body.modelPath);
+    const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
+    console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
+    return new PoseNet(mobilenet);
   }
   exports.load = load;
 });
@@ -90794,7 +90791,6 @@ var require_config = __commonJS((exports) => {
     iris: {
       enabled: true,
       modelPath: "../models/iris.json",
-      enlargeFactor: 2.3,
       inputSize: 64
     },
     age: {
@@ -90822,7 +90818,6 @@ var require_config = __commonJS((exports) => {
     enabled: true,
     modelPath: "../models/posenet.json",
     inputResolution: 257,
-    outputStride: 16,
     maxDetections: 10,
     scoreThreshold: 0.8,
     nmsRadius: 20
@@ -90834,7 +90829,6 @@ var require_config = __commonJS((exports) => {
     minConfidence: 0.5,
     iouThreshold: 0.1,
     scoreThreshold: 0.8,
-    enlargeFactor: 1.65,
     maxHands: 1,
     landmarks: true,
     detector: {
@@ -91044,14 +91038,14 @@ class Human {
         this.models.gender || gender.load(this.config),
         this.models.emotion || emotion.load(this.config),
         this.models.facemesh || facemesh.load(this.config.face),
-        this.models.posenet || posenet.load(this.config.body),
+        this.models.posenet || posenet.load(this.config),
         this.models.handpose || handpose.load(this.config.hand)
       ]);
     } else {
       if (this.config.face.enabled && !this.models.facemesh)
         this.models.facemesh = await facemesh.load(this.config.face);
       if (this.config.body.enabled && !this.models.posenet)
-        this.models.posenet = await posenet.load(this.config.body);
+        this.models.posenet = await posenet.load(this.config);
       if (this.config.hand.enabled && !this.models.handpose)
         this.models.handpose = await handpose.load(this.config.hand);
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age)
@@ -91197,13 +91191,13 @@ class Human {
     }
     this.analyze("Start Body:");
     if (this.config.async) {
-      poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process2.tensor, this.config.body) : [];
+      poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process2.tensor, this.config) : [];
       if (this.perf.body)
        delete this.perf.body;
     } else {
       this.state = "run:body";
       timeStamp = now();
-      poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process2.tensor, this.config.body) : [];
+      poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process2.tensor, this.config) : [];
       this.perf.body = Math.trunc(now() - timeStamp);
     }
     this.analyze("End Body:");
File diff suppressed because one or more lines are too long

dist/human.esm.json

@@ -1,7 +1,7 @@
 {
   "inputs": {
     "config.js": {
-      "bytes": 7744,
+      "bytes": 8366,
       "imports": []
     },
     "node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -224,7 +224,7 @@
       ]
     },
     "src/body/modelPoseNet.js": {
-      "bytes": 3658,
+      "bytes": 1946,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -261,7 +261,7 @@
       ]
     },
     "src/body/util.js": {
-      "bytes": 2260,
+      "bytes": 2262,
       "imports": [
         {
           "path": "src/body/keypoints.js"
@@ -288,7 +288,7 @@
       ]
     },
     "src/face/blazeface.js": {
-      "bytes": 7226,
+      "bytes": 7058,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -327,7 +327,7 @@
       ]
     },
     "src/face/facepipeline.js": {
-      "bytes": 14774,
+      "bytes": 14752,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -387,7 +387,7 @@
       ]
     },
     "src/hand/handdetector.js": {
-      "bytes": 4229,
+      "bytes": 4230,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -433,7 +433,7 @@
       "imports": []
     },
     "src/human.js": {
-      "bytes": 13727,
+      "bytes": 13707,
       "imports": [
         {
           "path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -489,7 +489,7 @@
       "imports": []
     },
     "src/profile.js": {
-      "bytes": 1004,
+      "bytes": 1061,
       "imports": []
     },
     "empty:/home/vlado/dev/human/node_modules/node-fetch/browser.js": {
@@ -513,7 +513,7 @@
   "dist/human.esm.js.map": {
     "imports": [],
     "inputs": {},
-    "bytes": 5609744
+    "bytes": 5608490
   },
   "dist/human.esm.js": {
     "imports": [],
@@ -576,7 +576,7 @@
       "bytesInOutput": 3025
     },
     "src/face/blazeface.js": {
-      "bytesInOutput": 7099
+      "bytesInOutput": 7112
     },
     "src/face/keypoints.js": {
       "bytesInOutput": 2768
@@ -588,7 +588,7 @@
       "bytesInOutput": 3017
     },
     "src/face/facepipeline.js": {
-      "bytesInOutput": 13567
+      "bytesInOutput": 13545
     },
     "src/face/uvcoords.js": {
       "bytesInOutput": 20584
@@ -600,7 +600,7 @@
       "bytesInOutput": 2590
     },
     "src/profile.js": {
-      "bytesInOutput": 1092
+      "bytesInOutput": 1108
     },
     "src/age/age.js": {
       "bytesInOutput": 1843
@@ -639,7 +639,7 @@
       "bytesInOutput": 2398
     },
     "src/body/modelPoseNet.js": {
-      "bytesInOutput": 2100
+      "bytesInOutput": 2026
     },
     "src/body/posenet.js": {
       "bytesInOutput": 903
@@ -672,19 +672,19 @@
       "bytesInOutput": 4482
     },
     "config.js": {
-      "bytesInOutput": 2299
+      "bytesInOutput": 2220
     },
     "package.json": {
       "bytesInOutput": 3561
     },
     "src/human.js": {
-      "bytesInOutput": 11575
+      "bytesInOutput": 11555
     },
     "src/human.js": {
       "bytesInOutput": 0
     }
   },
-  "bytes": 3196462
+  "bytes": 3196296
 }
}
@@ -6,47 +6,26 @@ const util = require('./util');
 class PoseNet {
   constructor(net) {
     this.baseModel = net;
+    this.outputStride = 16;
   }
-
-  /**
-   * Infer through PoseNet, and estimates multiple poses using the outputs.
-   * This does standard ImageNet pre-processing before inferring through the
-   * model. The input image pixels should have values in [0-255]. It detects
-   * multiple poses and finds their parts from part scores and displacement
-   * vectors using a fast greedy decoding algorithm. It returns up to
-   * `config.maxDetections` object instance detections in decreasing root
-   * score order.
-   *
-   * @param input
-   * (ImageData|HTMLImageElement|HTMLCanvasElement|HTMLVideoElement) The input
-   * image to feed through the network.
-   *
-   * @param config MultiPoseEstimationConfig object that contains parameters
-   * for the PoseNet inference using multiple pose estimation.
-   *
-   * @return An array of poses and their scores, each containing keypoints and
-   * the corresponding keypoint scores. The positions of the keypoints are
-   * in the same scale as the original image.
-   */
   async estimatePoses(input, config) {
     return new Promise(async (resolve) => {
-      const outputStride = config.outputStride;
       // const inputResolution = config.inputResolution;
       const height = input.shape[1];
       const width = input.shape[2];
-      const resized = util.resizeTo(input, [config.inputResolution, config.inputResolution]);
-      const { heatmapScores, offsets, displacementFwd, displacementBwd } = this.baseModel.predict(resized);
-      const allTensorBuffers = await util.toTensorBuffers3D([heatmapScores, offsets, displacementFwd, displacementBwd]);
+      const resized = util.resizeTo(input, [config.body.inputResolution, config.body.inputResolution]);
+      const res = this.baseModel.predict(resized);
+      const allTensorBuffers = await util.toTensorBuffers3D([res.heatmapScores, res.offsets, res.displacementFwd, res.displacementBwd]);
       const scoresBuffer = allTensorBuffers[0];
       const offsetsBuffer = allTensorBuffers[1];
       const displacementsFwdBuffer = allTensorBuffers[2];
       const displacementsBwdBuffer = allTensorBuffers[3];
-      const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, outputStride, config.maxDetections, config.scoreThreshold, config.nmsRadius);
-      const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.inputResolution, config.inputResolution]);
-      heatmapScores.dispose();
-      offsets.dispose();
-      displacementFwd.dispose();
-      displacementBwd.dispose();
+      const poses = await decodeMultiple.decodeMultiplePoses(scoresBuffer, offsetsBuffer, displacementsFwdBuffer, displacementsBwdBuffer, this.outputStride, config.body.maxDetections, config.body.scoreThreshold, config.body.nmsRadius);
+      const resultPoses = util.scaleAndFlipPoses(poses, [height, width], [config.body.inputResolution, config.body.inputResolution]);
+      res.heatmapScores.dispose();
+      res.offsets.dispose();
+      res.displacementFwd.dispose();
+      res.displacementBwd.dispose();
       resized.dispose();
       resolve(resultPoses);
     });
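The removed JSDoc still describes the contract accurately: estimatePoses() resolves with up to config.body.maxDetections poses in decreasing root-score order, decoded greedily from part scores and displacement vectors, with keypoint positions scaled back to the original input. A hypothetical example of one returned pose:

// hypothetical single pose, coordinates in input-image scale
// {
//   score: 0.87,
//   keypoints: [
//     { part: 'nose', score: 0.99, position: { x: 321, y: 148 } },
//     { part: 'leftEye', score: 0.98, position: { x: 334, y: 139 } },
//     ...
//   ]
// }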
@@ -57,24 +36,12 @@ class PoseNet {
   }
 }
 exports.PoseNet = PoseNet;
-async function loadMobileNet(config) {
-  const graphModel = await tf.loadGraphModel(config.modelPath);
-  const mobilenet = new modelMobileNet.MobileNet(graphModel, config.outputStride);
+
+async function load(config) {
+  const graphModel = await tf.loadGraphModel(config.body.modelPath);
+  const mobilenet = new modelMobileNet.MobileNet(graphModel, this.outputStride);
   // eslint-disable-next-line no-console
-  console.log(`Human: load model: ${config.modelPath.match(/\/(.*)\./)[1]}`);
+  console.log(`Human: load model: ${config.body.modelPath.match(/\/(.*)\./)[1]}`);
   return new PoseNet(mobilenet);
 }
-/**
- * Loads the PoseNet model instance from a checkpoint, with the MobileNet architecture.
- * The model to be loaded is configurable using the config dictionary ModelConfig.
- * Please find more details in the documentation of the ModelConfig.
- *
- * @param config ModelConfig dictionary that contains parameters for
- * the PoseNet loading process. Please find more details of each parameter
- * in the documentation of the ModelConfig interface. The predefined
- * `MOBILENET_V1_CONFIG` and `RESNET_CONFIG` can also be used as references
- * for defining your customized config.
- */
-async function load(config) {
-  return loadMobileNet(config);
-}
 exports.load = load;
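Taken together, these two hunks move src/body/modelPoseNet.js from a pre-scoped options object to the full config, with load() and estimatePoses() both reading their settings from the config.body section. A minimal usage sketch, with hypothetical paths and values:

const posenet = require('./src/body/modelPoseNet');

// hypothetical config shape; real values live in config.js
const config = {
  body: {
    enabled: true,
    modelPath: '../models/posenet.json',
    inputResolution: 257,
    maxDetections: 10,
    scoreThreshold: 0.8,
    nmsRadius: 20,
  },
};

async function detectBody(tensor) {
  const model = await posenet.load(config); // reads config.body.modelPath
  return model.estimatePoses(tensor, config); // reads config.body.* for resize and decoding
}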
@@ -30,11 +30,13 @@ function getBoundingBox(keypoints) {
   });
 }
 exports.getBoundingBox = getBoundingBox;
+
 function getBoundingBoxPoints(keypoints) {
   const { minX, minY, maxX, maxY } = getBoundingBox(keypoints);
   return [{ x: minX, y: minY }, { x: maxX, y: minY }, { x: maxX, y: maxY }, { x: minX, y: maxY }];
 }
 exports.getBoundingBoxPoints = getBoundingBoxPoints;
+
 async function toTensorBuffers3D(tensors) {
   return Promise.all(tensors.map((tensor) => tensor.buffer()));
 }
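getBoundingBoxPoints() returns the four box corners in clockwise order starting at the top-left, and toTensorBuffers3D() downloads a batch of tensors into CPU-side TensorBuffer objects in parallel, so the pose decoder can do synchronous indexed reads without per-element GPU stalls. A minimal sketch of the buffer pattern, assuming the helper is exported like its neighbors:

async function bufferDemo() {
  const t = tf.tensor3d([[[1], [2]], [[3], [4]]]); // shape [2, 2, 1]
  const [buf] = await util.toTensorBuffers3D([t]); // async download to CPU
  console.log(buf.get(1, 0, 0)); // 3 - indexed read on the TensorBuffer
  t.dispose();
}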
@@ -58,7 +58,7 @@ async function predict(image, config) {
     data = emotionT.dataSync();
     tf.dispose(emotionT);
   } else {
-    const profileData = await tf.profile(() => models.emotion.predict(grayscale));
+    const profileData = await tf.profile(() => models.emotion.predict(normalize));
     data = profileData.result.dataSync();
     profileData.result.dispose();
     profile.run('emotion', profileData);
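The profiling branch relies on tf.profile(), which executes the callback, records memory and kernel statistics, and hands back the callback's return value as result. A minimal sketch with a hypothetical model and input tensor:

async function profiledPredict(model, input) {
  const profileData = await tf.profile(() => model.predict(input));
  const data = profileData.result.dataSync(); // result is the predict() output
  profileData.result.dispose();
  // newBytes / newTensors / peakBytes are the fields profile.js aggregates
  console.log(profileData.newBytes, profileData.newTensors, profileData.peakBytes);
  return data;
}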
@@ -97,9 +97,7 @@ class BlazeFaceModel {
       }
       const decodedBounds = decodeBounds(prediction, this.anchors, this.inputSize);
       const logits = tf.slice(prediction, [0, 0], [-1, 1]);
-      // activation ('elu'|'hardSigmoid'|'linear'|'relu'|'relu6'|'selu'|'sigmoid'|'softmax'|'softplus'|'softsign'|'tanh')
-      // const scoresOut = tf.sigmoid(logits).squeeze();
-      const scoresOut = logits.squeeze();
+      const scoresOut = tf.sigmoid(logits).squeeze();
       return [prediction, decodedBounds, scoresOut];
     });
     const boxIndicesTensor = await tf.image.nonMaxSuppressionAsync(boxes, scores, this.config.detector.maxFaces, this.config.detector.iouThreshold, this.config.detector.scoreThreshold);
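This hunk re-enables the sigmoid that had been commented out: scoreThreshold is defined on [0, 1], while raw logits are unbounded, so thresholding logits directly does not do what the config promises. A minimal illustration:

// sigmoid maps unbounded logits into [0, 1] so the configured
// scoreThreshold comparison in nonMaxSuppressionAsync is meaningful
const logits = tf.tensor1d([-2, 0, 3]);
tf.sigmoid(logits).print(); // approximately [0.12, 0.5, 0.95]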
@@ -49,7 +49,7 @@ class Pipeline {
     this.meshWidth = config.mesh.inputSize;
     this.meshHeight = config.mesh.inputSize;
     this.irisSize = config.iris.inputSize;
-    this.irisEnlarge = config.iris.enlargeFactor;
+    this.irisEnlarge = 2.3;
   }

   transformRawCoords(rawCoords, box, angle, rotationMatrix) {
@@ -57,6 +57,7 @@ class HandDetector {
     rawBoxes.dispose();
     const filteredT = await tf.image.nonMaxSuppressionAsync(boxes, scores, config.maxHands, config.iouThreshold, config.scoreThreshold);
     const filtered = filteredT.arraySync();
+
     scores.dispose();
     filteredT.dispose();
     const hands = [];
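tf.image.nonMaxSuppressionAsync() takes boxes, scores, a maximum output count, and IoU/score thresholds, and resolves to the indices of the boxes to keep. A small self-contained illustration with hypothetical boxes:

async function nmsDemo() {
  // two heavily overlapping boxes in [y1, x1, y2, x2] format
  const boxes = tf.tensor2d([[0, 0, 1, 1], [0, 0, 0.9, 0.9]]);
  const scores = tf.tensor1d([0.9, 0.8]);
  const keepT = await tf.image.nonMaxSuppressionAsync(boxes, scores, 10, 0.3, 0.75);
  console.log(keepT.arraySync()); // [0] - the lower-scored overlap is suppressed
  tf.dispose([keepT, boxes, scores]);
}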
@@ -134,12 +134,12 @@ class Human {
         this.models.gender || gender.load(this.config),
         this.models.emotion || emotion.load(this.config),
         this.models.facemesh || facemesh.load(this.config.face),
-        this.models.posenet || posenet.load(this.config.body),
+        this.models.posenet || posenet.load(this.config),
         this.models.handpose || handpose.load(this.config.hand),
       ]);
     } else {
       if (this.config.face.enabled && !this.models.facemesh) this.models.facemesh = await facemesh.load(this.config.face);
-      if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config.body);
+      if (this.config.body.enabled && !this.models.posenet) this.models.posenet = await posenet.load(this.config);
       if (this.config.hand.enabled && !this.models.handpose) this.models.handpose = await handpose.load(this.config.hand);
       if (this.config.face.enabled && this.config.face.age.enabled && !this.models.age) this.models.age = await age.load(this.config);
       if (this.config.face.enabled && this.config.face.gender.enabled && !this.models.gender) this.models.gender = await gender.load(this.config);
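The Promise.all branch above relies on `existing || load()` short-circuiting: on the first call every loader runs in parallel, and on later calls the already-loaded model instance passes through unchanged. A minimal sketch of the pattern, using the same module names as the surrounding code:

async function loadAll(models, config) {
  // non-promise values (already-loaded models) resolve as-is inside Promise.all
  [models.facemesh, models.posenet, models.handpose] = await Promise.all([
    models.facemesh || facemesh.load(config.face),
    models.posenet || posenet.load(config),
    models.handpose || handpose.load(config.hand),
  ]);
  return models;
}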
@@ -327,12 +327,12 @@ class Human {
       // run posenet
       this.analyze('Start Body:');
       if (this.config.async) {
-        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config.body) : [];
+        poseRes = this.config.body.enabled ? this.models.posenet.estimatePoses(process.tensor, this.config) : [];
         if (this.perf.body) delete this.perf.body;
       } else {
         this.state = 'run:body';
         timeStamp = now();
-        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config.body) : [];
+        poseRes = this.config.body.enabled ? await this.models.posenet.estimatePoses(process.tensor, this.config) : [];
         this.perf.body = Math.trunc(now() - timeStamp);
       }
       this.analyze('End Body:');
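In the non-async branch, each stage is wrapped in a monotonic-clock measurement and recorded in the perf map; the async branch skips per-stage timing since stages overlap. A minimal sketch of the timing pattern, assuming now() wraps performance.now():

const now = () => performance.now();

async function timedStage(perf, name, fn) {
  const timeStamp = now();
  const res = await fn(); // e.g. models.posenet.estimatePoses(tensor, config)
  perf[name] = Math.trunc(now() - timeStamp); // elapsed milliseconds, truncated
  return res;
}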
@@ -18,7 +18,8 @@ function profile(name, data) {
   if (largest.length > maxResults) largest.length = maxResults;
   const res = { newBytes: data.newBytes, newTensors: data.newTensors, peakBytes: data.peakBytes, numKernelOps: data.kernels.length, timeKernelOps: time, slowestKernelOps: slowest, largestKernelOps: largest };
   profileData[name] = res;
   // eslint-disable-next-line no-console
   console.log('Human profiler', name, res);
 }
+
 exports.run = profile;
 exports.data = profileData;
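Each profiler run stores one summary record per model name in profileData; the fields come straight from the tf.profile() output plus the aggregations computed above. A hypothetical record:

// hypothetical example of profileData.emotion after one profiled run
// {
//   newBytes: 1048576,
//   newTensors: 42,
//   peakBytes: 8388608,
//   numKernelOps: 310,
//   timeKernelOps: 12.4,              // total kernel time in ms
//   slowestKernelOps: [ /* top kernels by time */ ],
//   largestKernelOps: [ /* top kernels by memory */ ]
// }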
2
wiki

@@ -1 +1 @@
-Subproject commit cb7e64e4ff87f5288d9a9e2b4250c33e74911c68
+Subproject commit e73a55ab96efd7d1672d4c71dcef27dd1bee9f1d