updated example

pull/13/head
Vladimir Mandic 2020-10-12 14:28:19 -04:00
parent 950ed99269
commit a8b56b12b4
12 changed files with 4314 additions and 4905 deletions

View File

@ -1,6 +1,6 @@
{ {
"name": "@vladmandic/face-api", "name": "@vladmandic/face-api",
"version": "0.5.3", "version": "0.6.2",
"description": "JavaScript module for Face Detection and Face Recognition Using Tensorflow/JS", "description": "JavaScript module for Face Detection and Face Recognition Using Tensorflow/JS",
"main": "build/src/index.js", "main": "build/src/index.js",
"browser": "dist/face-api.js", "browser": "dist/face-api.js",
@ -10,7 +10,7 @@
}, },
"type": "module", "type": "module",
"scripts": { "scripts": {
"build-esm": "esbuild --bundle --format=esm --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.esm.js --external:@tensorflow/tfjs --log-level=error --tsconfig=./tsconfig.json build/src/index.js", "build-esm": "esbuild --bundle --format=esm --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.esm.js --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build-iife": "esbuild --bundle --format=iife --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.js --global-name=faceapi --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js", "build-iife": "esbuild --bundle --format=iife --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.js --global-name=faceapi --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build": "rimraf build/ dist/ && tsc && npm run build-esm && npm run build-iife" "build": "rimraf build/ dist/ && tsc && npm run build-esm && npm run build-iife"
}, },

View File

@ -24,7 +24,6 @@ import * as pkg from '../package.json';
const node = (typeof process !== 'undefined'); const node = (typeof process !== 'undefined');
const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined'); const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined');
export const version = { faceapi: pkg.version, node, browser }; export const version = { faceapi: pkg.version, node, browser };
import { PlatformBrowser } from './Platform'; // import {PlatformBrowser} from './Platform';
if (!tf.env().platform && tf.env().get('IS_BROWSER')) // if (!tf.env().platform && tf.env().get('IS_BROWSER')) tf.env().setPlatform('browser', new PlatformBrowser);
tf.env().setPlatform('browser', new PlatformBrowser);
//# sourceMappingURL=index.js.map //# sourceMappingURL=index.js.map

View File

@ -1 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,4BAA4B;AAC5B,sCAAsC;AAEtC,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC;AAC/B,OAAO,KAAK,KAAK,MAAM,SAAS,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;AAE3B,cAAc,sBAAsB,CAAC;AACrC,cAAc,iBAAiB,CAAC;AAChC,cAAc,aAAa,CAAA;AAC3B,cAAc,aAAa,CAAC;AAC5B,cAAc,2BAA2B,CAAC;AAC1C,cAAc,yBAAyB,CAAC;AACxC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,wBAAwB,CAAC;AACvC,cAAc,0BAA0B,CAAC;AACzC,cAAc,oBAAoB,CAAC;AACnC,cAAc,qBAAqB,CAAC;AACpC,cAAc,iBAAiB,CAAC;AAChC,cAAc,iBAAiB,CAAC;AAEhC,OAAO,KAAK,GAAG,MAAM,iBAAiB,CAAC;AACvC,MAAM,IAAI,GAAG,CAAC,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC;AAC9C,MAAM,OAAO,GAAG,CAAC,OAAO,SAAS,KAAK,WAAW,CAAC,IAAI,CAAC,OAAO,SAAS,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC;AACnG,MAAM,CAAC,MAAM,OAAO,GAAG,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;AAE/D,OAAO,EAAC,eAAe,EAAC,MAAM,YAAY,CAAC;AAC3C,IAAI,CAAC,EAAE,CAAC,GAAG,EAAE,CAAC,QAAQ,IAAI,EAAE,CAAC,GAAG,EAAE,CAAC,GAAG,CAAC,YAAY,CAAC;IAAE,EAAE,CAAC,GAAG,EAAE,CAAC,WAAW,CAAC,SAAS,EAAE,IAAI,eAAe,CAAC,CAAC"} 
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,4BAA4B;AAC5B,sCAAsC;AAEtC,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AACvC,OAAO,KAAK,IAAI,MAAM,QAAQ,CAAC;AAC/B,OAAO,KAAK,KAAK,MAAM,SAAS,CAAC;AACjC,OAAO,EAAE,IAAI,EAAE,KAAK,EAAE,EAAE,EAAE,CAAC;AAE3B,cAAc,sBAAsB,CAAC;AACrC,cAAc,iBAAiB,CAAC;AAChC,cAAc,aAAa,CAAA;AAC3B,cAAc,aAAa,CAAC;AAC5B,cAAc,2BAA2B,CAAC;AAC1C,cAAc,yBAAyB,CAAC;AACxC,cAAc,4BAA4B,CAAC;AAC3C,cAAc,mBAAmB,CAAC;AAClC,cAAc,mBAAmB,CAAC;AAClC,cAAc,aAAa,CAAC;AAC5B,cAAc,wBAAwB,CAAC;AACvC,cAAc,0BAA0B,CAAC;AACzC,cAAc,oBAAoB,CAAC;AACnC,cAAc,qBAAqB,CAAC;AACpC,cAAc,iBAAiB,CAAC;AAChC,cAAc,iBAAiB,CAAC;AAEhC,OAAO,KAAK,GAAG,MAAM,iBAAiB,CAAC;AACvC,MAAM,IAAI,GAAG,CAAC,OAAO,OAAO,KAAK,WAAW,CAAC,CAAC;AAC9C,MAAM,OAAO,GAAG,CAAC,OAAO,SAAS,KAAK,WAAW,CAAC,IAAI,CAAC,OAAO,SAAS,CAAC,SAAS,KAAK,WAAW,CAAC,CAAC;AACnG,MAAM,CAAC,MAAM,OAAO,GAAG,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,EAAE,IAAI,EAAE,OAAO,EAAE,CAAC;AAE/D,8CAA8C;AAC9C,8GAA8G"}

8424
dist/face-api.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

354
dist/face-api.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -6,181 +6,10 @@
<meta content="text/html"> <meta content="text/html">
<meta charset="UTF-8"> <meta charset="UTF-8">
<script src="https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.min.js"></script> <script src="https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.min.js"></script>
<script src="https://vladmandic.github.io/face-api/dist/face-api.js"></script> <script src="./index.js" type="module"></script>
<style>
body { font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0; }
</style>
</head> </head>
<body> <body style="font-family: monospace; background: black; color: white; font-size: 16px; line-height: 22px; margin: 0;">
<div id="log"></div> <div id="log"></div>
<script>
/* global faceapi */ // face-api is loaded via <script src> in a <head> section
/* tfjs should be loaded explicitly and is not embedded inside facepi.js to keep size small and allow reusability */
// configuration options
const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
const imgSize = 512; // maximum image size in pixels
const minScore = 0.1; // minimum score
const maxResults = 5; // maximum number of results to return
const samples = ['sample (1).jpg', 'sample (2).jpg', 'sample (3).jpg', 'sample (4).jpg', 'sample (5).jpg', 'sample (6).jpg']; // sample images to be loaded using http
// helper function to pretty-print json object to string
function str(json) {
let text = '<font color="lightblue">';
text += json ? JSON.stringify(json).replace(/{|}|"|\[|\]/g, '').replace(/,/g, ', ') : '';
text += '</font>';
return text;
}
// helper function to print strings to html document as a log
function log(...txt) {
// eslint-disable-next-line no-console
console.log(...txt);
document.getElementById('log').innerHTML += `<br>${txt}`;
}
// helper function to draw detected faces
function faces(name, title, id, data) {
// create canvas to draw on
const img = document.getElementById(id);
const canvas = document.createElement('canvas');
canvas.style.position = 'absolute';
canvas.style.left = `${img.offsetLeft}px`;
canvas.style.top = `${img.offsetTop}px`;
canvas.width = img.width;
canvas.height = img.height;
const ctx = canvas.getContext('2d');
// draw title
ctx.font = '1rem sans-serif';
ctx.fillStyle = 'black';
ctx.fillText(name, 2, 15);
ctx.fillText(title, 2, 35);
for (const person of data) {
// draw box around each face
ctx.lineWidth = 3;
ctx.strokeStyle = 'deepskyblue';
ctx.fillStyle = 'deepskyblue';
ctx.globalAlpha = 0.4;
ctx.beginPath();
ctx.rect(person.detection.box.x, person.detection.box.y, person.detection.box.width, person.detection.box.height);
ctx.stroke();
ctx.globalAlpha = 1;
ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, person.detection.box.x, person.detection.box.y - 18);
ctx.fillText(`${Math.round(person.age)} years`, person.detection.box.x, person.detection.box.y - 2);
// draw face points for each face
ctx.fillStyle = 'lightblue';
ctx.globalAlpha = 0.5;
const pointSize = 2;
for (const pt of person.landmarks.positions) {
ctx.beginPath();
ctx.arc(pt.x, pt.y, pointSize, 0, 2 * Math.PI);
ctx.fill();
}
}
// add canvas to document
document.body.appendChild(canvas);
}
// helper function to draw processed image and its results
function print(title, img, data) {
// eslint-disable-next-line no-console
console.log('Results:', title, img, data);
const el = new Image();
el.id = Math.floor(Math.random() * 100000);
el.src = img;
el.width = imgSize;
el.onload = () => faces(img, title, el.id, data);
document.body.appendChild(el);
}
// loads image and draws it on resized canvas so we always have correct image size regardless of source
async function image(url) {
return new Promise((resolve) => {
const img = new Image();
// wait until image is actually loaded
img.addEventListener('load', () => {
// resize image so larger axis is not bigger than limit
const ratio = 1.0 * img.height / img.width;
img.width = ratio <= 1 ? imgSize : 1.0 * imgSize / ratio;
img.height = ratio >= 1 ? imgSize : 1.0 * imgSize * ratio;
// create canvas and draw loaded image
const canvas = document.createElement('canvas');
canvas.height = img.height;
canvas.width = img.width;
const ctx = canvas.getContext('2d');
ctx.drawImage(img, 0, 0, img.width, img.height);
// return generated canvas to be used by tfjs during detection
resolve(canvas);
});
// load image
img.src = url;
});
}
async function main() {
// initialize tfjs
log('FaceAPI Test');
window.tf = faceapi.tf;
await faceapi.tf.setBackend('webgl');
await faceapi.tf.enableProdMode();
await faceapi.tf.ENV.set('DEBUG', false);
await faceapi.tf.ready();
// check version
log(`Version: TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} FaceAPI ${str(faceapi?.version || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);
log(`Flags: ${JSON.stringify(faceapi.tf.ENV.flags)}`);
// load face-api models
log('Loading FaceAPI models');
await faceapi.nets.tinyFaceDetector.load(modelPath);
await faceapi.nets.ssdMobilenetv1.load(modelPath);
await faceapi.nets.ageGenderNet.load(modelPath);
await faceapi.nets.faceLandmark68Net.load(modelPath);
await faceapi.nets.faceRecognitionNet.load(modelPath);
await faceapi.nets.faceExpressionNet.load(modelPath);
const optionsTinyFace = new faceapi.TinyFaceDetectorOptions({ inputSize: imgSize, scoreThreshold: minScore });
const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
// check tf engine state
const engine = await faceapi.tf.engine();
log(`TF Engine State: ${str(engine.state)}`);
// loop through all images and try to process them
log(`Start processing: ${samples.length} images ...<br>`);
for (const img of samples) {
// new line
document.body.appendChild(document.createElement('br'));
// load and resize image
const canvas = await image(img);
try {
// actual model execution
const dataTinyYolo = await faceapi
.detectAllFaces(canvas, optionsTinyFace)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
// print results to screen
print('TinyFace Detector', img, dataTinyYolo);
// actual model execution
const dataSSDMobileNet = await faceapi
.detectAllFaces(canvas, optionsSSDMobileNet)
.withFaceLandmarks()
.withFaceExpressions()
.withFaceDescriptors()
.withAgeAndGender();
// print results to screen
print('SSD MobileNet', img, dataSSDMobileNet);
} catch (err) {
log(`Image: ${img} Error during processing ${str(err)}`);
// eslint-disable-next-line no-console
console.error(err);
}
}
}
// start processing as soon as page is loaded
window.onload = main;
</script> </script>
</body> </body>
</html> </html>

165
example/index.js Normal file
View File

@ -0,0 +1,165 @@
import * as faceapi from '../dist/face-api.esm.js';
// configuration options
const modelPath = 'https://vladmandic.github.io/face-api/model/'; // path to model folder that will be loaded using http
const imgSize = 512; // maximum image size in pixels; image() downscales larger inputs to this limit before detection
const minScore = 0.1; // minimum detection confidence score for a face to be reported
const maxResults = 5; // maximum number of results to return per detector run
const samples = ['sample (1).jpg', 'sample (2).jpg', 'sample (3).jpg', 'sample (4).jpg', 'sample (5).jpg', 'sample (6).jpg']; // sample images to be loaded using http, relative to the page location
// helper function to pretty-print a json object as an inline-colored html string
function str(json) {
  // strip structural JSON characters ({ } " [ ]) and add a space after each comma for readability;
  // falsy input (null/undefined) yields an empty body
  const body = json ? JSON.stringify(json).replace(/[{}"[\]]/g, '').replace(/,/g, ', ') : '';
  return `<font color="lightblue">${body}</font>`;
}
// helper function that mirrors log output to both the browser console and the #log element
function log(...txt) {
  // eslint-disable-next-line no-console
  console.log(...txt);
  const sink = document.getElementById('log');
  sink.innerHTML += `<br>${txt}`;
}
// helper function to draw detected faces as a canvas overlay on top of the image with the given element id
function faces(name, title, id, data) {
  // position a canvas exactly over the source image
  const sourceImg = document.getElementById(id);
  const overlay = document.createElement('canvas');
  overlay.style.position = 'absolute';
  overlay.style.left = `${sourceImg.offsetLeft}px`;
  overlay.style.top = `${sourceImg.offsetTop}px`;
  overlay.width = sourceImg.width;
  overlay.height = sourceImg.height;
  const ctx = overlay.getContext('2d');
  // draw image name and detector title in the top-left corner
  ctx.font = '1rem sans-serif';
  ctx.fillStyle = 'black';
  ctx.fillText(name, 2, 15);
  ctx.fillText(title, 2, 35);
  for (const person of data) {
    const { box } = person.detection;
    // semi-transparent bounding box around each face
    ctx.lineWidth = 3;
    ctx.strokeStyle = 'deepskyblue';
    ctx.fillStyle = 'deepskyblue';
    ctx.globalAlpha = 0.4;
    ctx.beginPath();
    ctx.rect(box.x, box.y, box.width, box.height);
    ctx.stroke();
    // gender/age labels above the box, drawn fully opaque
    ctx.globalAlpha = 1;
    ctx.fillText(`${Math.round(100 * person.genderProbability)}% ${person.gender}`, box.x, box.y - 18);
    ctx.fillText(`${Math.round(person.age)} years`, box.x, box.y - 2);
    // face landmark points as small translucent dots
    ctx.fillStyle = 'lightblue';
    ctx.globalAlpha = 0.5;
    const radius = 2;
    for (const pt of person.landmarks.positions) {
      ctx.beginPath();
      ctx.arc(pt.x, pt.y, radius, 0, 2 * Math.PI);
      ctx.fill();
    }
  }
  // attach the finished overlay to the document
  document.body.appendChild(overlay);
}
// helper function that appends the processed image to the page and schedules drawing of its results
function print(title, img, data) {
  // eslint-disable-next-line no-console
  console.log('Results:', title, img, data);
  const el = new Image();
  // random element id so multiple result images can coexist on the page
  el.id = Math.floor(Math.random() * 100000);
  el.width = imgSize;
  // draw overlays only once the image has rendered, so its offsets are valid
  el.onload = () => faces(img, title, el.id, data);
  el.src = img;
  document.body.appendChild(el);
}
// loads image and draws it on a resized canvas so we always have correct image size regardless of source
// resolves with a canvas sized so the larger axis equals imgSize; rejects if the image cannot be loaded
async function image(url) {
  return new Promise((resolve, reject) => {
    const img = new Image();
    // wait until image is actually loaded
    img.addEventListener('load', () => {
      // resize image so larger axis is not bigger than limit, preserving aspect ratio
      const ratio = 1.0 * img.height / img.width;
      img.width = ratio <= 1 ? imgSize : 1.0 * imgSize / ratio;
      img.height = ratio >= 1 ? imgSize : 1.0 * imgSize * ratio;
      // create canvas and draw loaded image at the target size
      const canvas = document.createElement('canvas');
      canvas.height = img.height;
      canvas.width = img.width;
      const ctx = canvas.getContext('2d');
      ctx.drawImage(img, 0, 0, img.width, img.height);
      // return generated canvas to be used by tfjs during detection
      resolve(canvas);
    });
    // reject instead of hanging forever when the image cannot be fetched or decoded
    img.addEventListener('error', () => reject(new Error(`image failed to load: ${url}`)));
    // load image
    img.src = url;
  });
}
// entry point: initializes tfjs, loads all models, then runs both detectors over each sample image
async function main() {
  // initialize tfjs: webgl backend, production mode, no debug overhead
  log('FaceAPI Test');
  await faceapi.tf.setBackend('webgl');
  await faceapi.tf.enableProdMode();
  await faceapi.tf.ENV.set('DEBUG', false);
  await faceapi.tf.ready();
  // check version
  log(`Version: TensorFlow/JS ${str(faceapi.tf?.version_core || '(not loaded)')} FaceAPI ${str(faceapi?.version || '(not loaded)')} Backend: ${str(faceapi.tf?.getBackend() || '(not loaded)')}`);
  log(`Flags: ${JSON.stringify(faceapi.tf.ENV.flags)}`);
  // load face-api models in parallel — the six loads are independent of each other
  log('Loading FaceAPI models');
  await Promise.all([
    faceapi.nets.tinyFaceDetector.load(modelPath),
    faceapi.nets.ssdMobilenetv1.load(modelPath),
    faceapi.nets.ageGenderNet.load(modelPath),
    faceapi.nets.faceLandmark68Net.load(modelPath),
    faceapi.nets.faceRecognitionNet.load(modelPath),
    faceapi.nets.faceExpressionNet.load(modelPath),
  ]);
  const optionsTinyFace = new faceapi.TinyFaceDetectorOptions({ inputSize: imgSize, scoreThreshold: minScore });
  const optionsSSDMobileNet = new faceapi.SsdMobilenetv1Options({ minConfidence: minScore, maxResults });
  // check tf engine state
  const engine = await faceapi.tf.engine();
  log(`TF Engine State: ${str(engine.state)}`);
  // loop through all images and try to process them
  log(`Start processing: ${samples.length} images ...<br>`);
  for (const img of samples) {
    // new line between sample results
    document.body.appendChild(document.createElement('br'));
    try {
      // load and resize image — inside try so a failed load is logged, not silently fatal
      const canvas = await image(img);
      // actual model execution: TinyFace detector pipeline
      const dataTinyYolo = await faceapi
        .detectAllFaces(canvas, optionsTinyFace)
        .withFaceLandmarks()
        .withFaceExpressions()
        .withFaceDescriptors()
        .withAgeAndGender();
      // print results to screen
      print('TinyFace Detector', img, dataTinyYolo);
      // actual model execution: SSD MobileNet pipeline
      const dataSSDMobileNet = await faceapi
        .detectAllFaces(canvas, optionsSSDMobileNet)
        .withFaceLandmarks()
        .withFaceExpressions()
        .withFaceDescriptors()
        .withAgeAndGender();
      // print results to screen
      print('SSD MobileNet', img, dataSSDMobileNet);
    } catch (err) {
      log(`Image: ${img} Error during processing ${str(err)}`);
      // eslint-disable-next-line no-console
      console.error(err);
    }
  }
}
// start processing as soon as page is loaded
window.onload = main;

6
package-lock.json generated
View File

@ -442,9 +442,9 @@
"dev": true "dev": true
}, },
"typescript": { "typescript": {
"version": "4.1.0-dev.20201011", "version": "4.1.0-dev.20201012",
"resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.0-dev.20201011.tgz", "resolved": "https://registry.npmjs.org/typescript/-/typescript-4.1.0-dev.20201012.tgz",
"integrity": "sha512-+Lfj2Q+dXFAJbfB1jQQRlhTssgl0CQVo8a+CNzlNXsnC1cN1yoH40wkEjWd6uB8NGkamxKda1xrEb2miJtpP5g==", "integrity": "sha512-afzjHTxmS6JszBOz3CkQqlP+5ZlvZ/ACbRJMXJWhEYfHPCDHdQGZVEMbdnYICM1KfgN2SUpubWJsWvyxkoNmHA==",
"dev": true "dev": true
}, },
"wrap-ansi": { "wrap-ansi": {

View File

@ -10,7 +10,7 @@
}, },
"type": "module", "type": "module",
"scripts": { "scripts": {
"build-esm": "esbuild --bundle --format=esm --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.esm.js --external:@tensorflow/tfjs --log-level=error --tsconfig=./tsconfig.json build/src/index.js", "build-esm": "esbuild --bundle --format=esm --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.esm.js --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build-iife": "esbuild --bundle --format=iife --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.js --global-name=faceapi --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js", "build-iife": "esbuild --bundle --format=iife --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.js --global-name=faceapi --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build": "rimraf build/ dist/ && tsc && npm run build-esm && npm run build-iife" "build": "rimraf build/ dist/ && tsc && npm run build-esm && npm run build-iife"
}, },

View File

@ -28,5 +28,5 @@ const node = (typeof process !== 'undefined');
const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined'); const browser = (typeof navigator !== 'undefined') && (typeof navigator.userAgent !== 'undefined');
export const version = { faceapi: pkg.version, node, browser }; export const version = { faceapi: pkg.version, node, browser };
import {PlatformBrowser} from './Platform'; // import {PlatformBrowser} from './Platform';
if (!tf.env().platform && tf.env().get('IS_BROWSER')) tf.env().setPlatform('browser', new PlatformBrowser); // if (!tf.env().platform && tf.env().get('IS_BROWSER')) tf.env().setPlatform('browser', new PlatformBrowser);