diff --git a/.gitignore b/.gitignore index 14433e9c..7ddfe6d1 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,2 @@ node_modules -alternative +private diff --git a/TODO.md b/TODO.md index cec9e624..fc71ed9b 100644 --- a/TODO.md +++ b/TODO.md @@ -2,6 +2,7 @@ - Strong typing - Automated testing +- Guard against corrupt input - Improve face embedding - Build Face embedding database - Dynamic sample processing diff --git a/config.js b/config.js index 0846fcd4..f67e7a77 100644 --- a/config.js +++ b/config.js @@ -85,6 +85,7 @@ export default { scoreThreshold: 0.2, // threshold for deciding when to remove boxes based on score // in non-maximum suppression, // this is applied on detection objects only and before minConfidence + return: true, // return extracted face as tensor }, mesh: { diff --git a/demo/embedding.html b/demo/embedding.html new file mode 100644 index 00000000..c664ab13 --- /dev/null +++ b/demo/embedding.html @@ -0,0 +1,32 @@ + + + + Human + + + + + + + + + + + + + + + +
Sample Images: +
+
Extracted Faces - click on a face to sort by similarity:<br>
+
+ + diff --git a/demo/embedding.js b/demo/embedding.js new file mode 100644 index 00000000..620a93b4 --- /dev/null +++ b/demo/embedding.js @@ -0,0 +1,94 @@ +import Human from '../dist/human.esm.js'; + +const userConfig = { + backend: 'wasm', + async: false, + warmup: 'none', + debug: true, + filter: false, + videoOptimized: false, + face: { + enabled: true, + detector: { rotation: true }, + mesh: { enabled: true }, + embedding: { enabled: true, modelPath: '../models/mobilefacenet.json' }, + iris: { enabled: false }, + age: { enabled: false }, + gender: { enabled: false }, + emotion: { enabled: false }, + }, + hand: { enabled: false }, + gesture: { enabled: false }, + body: { enabled: false }, +}; +const human = new Human(userConfig); +const samples = ['../assets/sample-me.jpg', '../assets/sample6.jpg', '../assets/sample1.jpg', '../assets/sample4.jpg', '../assets/sample5.jpg', '../assets/sample3.jpg', '../assets/sample2.jpg', + '../private/me (1).jpg', '../private/me (2).jpg', '../private/me (3).jpg', '../private/me (4).jpg', '../private/me (5).jpg', '../private/me (6).jpg', '../private/me (7).jpg', '../private/me (8).jpg', + '../private/me (9).jpg', '../private/me (10).jpg', '../private/me (11).jpg', '../private/me (12).jpg', '../private/me (13).jpg']; +const all = []; + +function log(...msg) { + const dt = new Date(); + const ts = `${dt.getHours().toString().padStart(2, '0')}:${dt.getMinutes().toString().padStart(2, '0')}:${dt.getSeconds().toString().padStart(2, '0')}.${dt.getMilliseconds().toString().padStart(3, '0')}`; + // eslint-disable-next-line no-console + console.log(ts, ...msg); +} + +async function analyze(face) { + log('Face:', face); + const canvases = document.getElementsByClassName('face'); + for (const canvas of canvases) { + const res = human.simmilarity(face.embedding, all[canvas.tag.sample][canvas.tag.face].embedding); + canvas.title = res; + await human.tf.browser.toPixels(all[canvas.tag.sample][canvas.tag.face].tensor, canvas); + const ctx = 
canvas.getContext('2d'); + ctx.font = 'small-caps 1rem "Lato"'; + ctx.fillStyle = 'rgba(0, 0, 0, 1)'; + ctx.fillText(`${(100 * res).toFixed(1)}%`, 3, 19); + ctx.fillStyle = 'rgba(255, 255, 255, 1)'; + ctx.fillText(`${(100 * res).toFixed(1)}%`, 4, 20); + } + const sorted = document.getElementById('faces'); + [...sorted.children] + .sort((a, b) => parseFloat(b.title) - parseFloat(a.title)) + .forEach((canvas) => sorted.appendChild(canvas)); +} + +async function faces(index, res) { + all[index] = res.face; + for (const i in res.face) { + // log(res.face[i]); + const canvas = document.createElement('canvas'); + canvas.tag = { sample: index, face: i }; + canvas.width = 200; + canvas.height = 200; + canvas.className = 'face'; + canvas.addEventListener('click', (evt) => { + log('Select:', evt.target.tag.sample, evt.target.tag.face); + analyze(all[evt.target.tag.sample][evt.target.tag.face]); + }); + human.tf.browser.toPixels(res.face[i].tensor, canvas); + document.getElementById('faces').appendChild(canvas); + } +} + +async function add(index) { + log('Add:', samples[index]); + return new Promise((resolve) => { + const img = new Image(100, 100); + img.onload = () => { + human.detect(img).then((res) => faces(index, res)); + document.getElementById('images').appendChild(img); + resolve(true); + }; + img.title = samples[index]; + img.src = samples[index]; + }); +} + +async function main() { + await human.load(); + for (const i in samples) await add(i); +} + +window.onload = main; diff --git a/src/human.ts b/src/human.ts index 806cb217..e7b7a60e 100644 --- a/src/human.ts +++ b/src/human.ts @@ -303,7 +303,8 @@ class Human { emotion: string, embedding: any, iris: number, - angle: any + angle: any, + tensor: any, }> = []; this.state = 'run:face'; @@ -402,7 +403,7 @@ class Human { embedding: embeddingRes, iris: (irisSize !== 0) ? Math.trunc(irisSize) / 100 : 0, angle, - // image: face.image.toInt().squeeze(), + tensor: this.config.face.detector.return ? 
face.image.squeeze() : null, }); // don't need face anymore diff --git a/wiki b/wiki index 69294f7a..fa7ac1f6 160000 --- a/wiki +++ b/wiki @@ -1 +1 @@ -Subproject commit 69294f7a0a99bd996286f8f5bb655c7ea8bfc10d +Subproject commit fa7ac1f695547aa0fd25845e6cac7ed5ee0adcae