// face-api/demo/node-singleprocess.js
// @ts-nocheck
const fs = require('fs');
const process = require('process');
const path = require('path');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const log = require('@vladmandic/pilogger');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require, no-unused-vars
const tf = require('@tensorflow/tfjs-node');
// eslint-disable-next-line import/no-extraneous-dependencies, node/no-unpublished-require
const canvas = require('canvas');
const faceapi = require('../dist/face-api.node.js'); // this is equivalent to '@vladmandic/faceapi'

const modelPathRoot = '../model'; // folder containing faceapi model manifests/weights
const imgPathRoot = './demo'; // modify to include your sample images
const minScore = 0.1; // minimum detection confidence passed to SsdMobilenetv1Options
const maxResults = 5; // maximum detections per image passed to SsdMobilenetv1Options

let optionsSSDMobileNet; // initialized in main() after models are loaded
/**
 * Load an image from a file path (or URL/Buffer accepted by node-canvas)
 * and draw it onto a freshly created canvas of the same dimensions.
 * @param {string|Buffer} input source accepted by canvas.loadImage
 * @returns {Promise<canvas.Canvas>} canvas with the image drawn at full size
 */
async function image(input) {
  // canvas.loadImage returns a Promise<Image>; without await, img.width/img.height
  // are undefined and the created canvas is invalid
  const img = await canvas.loadImage(input);
  const c = canvas.createCanvas(img.width, img.height);
  const ctx = c.getContext('2d');
  ctx.drawImage(img, 0, 0, img.width, img.height);
  return c;
}
/**
 * Run the full FaceAPI analysis pipeline on one input.
 * @param tensor input accepted by faceapi.detectAllFaces (canvas or tensor)
 * @returns {Promise<Array>} detections enriched with landmarks, expressions,
 *   descriptors, and age/gender
 */
async function detect(tensor) {
  const pipeline = faceapi.detectAllFaces(tensor, optionsSSDMobileNet);
  const enriched = pipeline
    .withFaceLandmarks()
    .withFaceExpressions()
    .withFaceDescriptors()
    .withAgeAndGender();
  return enriched;
}
/**
 * Entry point: initializes TF backend, loads all FaceAPI models, then either
 * processes every .jpg in imgPathRoot (no CLI argument) or the single image
 * path passed as process.argv[2].
 */
async function main() {
  log.header();
  log.info('FaceAPI single-process test');

  // patch the nodejs environment with node-canvas implementations so faceapi
  // can work with Canvas/Image inputs outside a browser
  faceapi.env.monkeyPatch({ Canvas: canvas.Canvas, Image: canvas.Image, ImageData: canvas.ImageData });

  await faceapi.tf.setBackend('tensorflow');
  await faceapi.tf.enableProdMode();
  await faceapi.tf.ENV.set('DEBUG', false);
  await faceapi.tf.ready();
  log.state(`Version: TensorFlow/JS ${faceapi.tf?.version_core} FaceAPI ${faceapi.version.faceapi} Backend: ${faceapi.tf?.getBackend()}`);

  log.info('Loading FaceAPI models');
  const modelPath = path.join(__dirname, modelPathRoot);
  await faceapi.nets.ssdMobilenetv1.loadFromDisk(modelPath);
  await faceapi.nets.ageGenderNet.loadFromDisk(modelPath);
  await faceapi.nets.faceLandmark68Net.loadFromDisk(modelPath);
  await faceapi.nets.faceRecognitionNet.loadFromDisk(modelPath);
  await faceapi.nets.faceExpressionNet.loadFromDisk(modelPath);
  optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });

  // log gender/age for each detected face (shared by both branches below)
  const logFaces = (result) => {
    for (const i of result) {
      log.data('Gender:', Math.round(100 * i.genderProbability), 'probability', i.gender, 'with age', Math.round(10 * i.age) / 10);
    }
  };

  if (process.argv.length !== 3) {
    // no image argument given: process every .jpg in imgPathRoot
    const t0 = process.hrtime.bigint();
    const dir = fs.readdirSync(imgPathRoot);
    for (const img of dir) {
      if (!img.toLocaleLowerCase().endsWith('.jpg')) continue;
      const tensor = await image(path.join(imgPathRoot, img));
      const result = await detect(tensor);
      log.data('Image:', img, 'Detected faces:', result.length);
      logFaces(result);
      // image() returns a node-canvas Canvas, which has no dispose(); the
      // optional call only disposes when the input is actually a tensor
      tensor.dispose?.();
    }
    const t1 = process.hrtime.bigint();
    // Number(), not parseInt(): the elapsed time is a BigInt, not a string
    log.info('Processed', dir.length, 'images in', Math.trunc(Number(t1 - t0) / 1000 / 1000), 'ms');
  } else {
    // single image path passed on the command line
    const param = process.argv[2];
    if (fs.existsSync(param)) {
      const tensor = await image(param);
      const result = await detect(tensor);
      log.data('Image:', param, 'Detected faces:', result.length);
      logFaces(result);
      tensor.dispose?.(); // see note above: no-op for Canvas inputs
    }
  }
}
main();