Compare commits


No commits in common. "master" and "v1.0" have entirely different histories.
master ... v1.0

1254 changed files with 17616 additions and 34108 deletions

@@ -1,148 +0,0 @@
{
"log": {
"enabled": false,
"debug": false,
"console": true,
"output": "build.log"
},
"profiles": {
"production": ["compile", "typings", "typedoc", "lint", "changelog"],
"development": ["serve", "watch", "compile"]
},
"clean": {
"locations": ["dist/*", "typedoc/*", "types/lib/src"]
},
"lint": {
"locations": [ "src/" ],
"rules": { }
},
"changelog": {
"log": "CHANGELOG.md"
},
"serve": {
"sslKey": "cert/https.key",
"sslCrt": "cert/https.crt",
"httpPort": 8000,
"httpsPort": 8001,
"documentRoot": ".",
"defaultFolder": "demo",
"defaultFile": "index.html"
},
"build": {
"global": {
"target": "es2018",
"treeShaking": true,
"ignoreAnnotations": true,
"sourcemap": false,
"banner": { "js": "/*\n Face-API\n homepage: <https://github.com/vladmandic/face-api>\n author: <https://github.com/vladmandic>'\n*/\n" }
},
"targets": [
{
"name": "tfjs/browser/tf-version",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-version.ts",
"output": "dist/tfjs.version.js"
},
{
"name": "tfjs/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/cpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-gpu.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/gpu",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-gpu.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/tfjs/tf-node-wasm.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/node/wasm",
"platform": "node",
"format": "cjs",
"input": "src/index.ts",
"output": "dist/face-api.node-wasm.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/nobundle",
"platform": "browser",
"format": "esm",
"input": "src/index.ts",
"output": "dist/face-api.esm-nobundle.js",
"external": ["@tensorflow"]
},
{
"name": "tfjs/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"input": "src/tfjs/tf-browser.ts",
"output": "dist/tfjs.esm.js"
},
{
"name": "faceapi/browser/iife/bundle",
"platform": "browser",
"format": "iife",
"globalName": "faceapi",
"minify": true,
"input": "src/index.ts",
"output": "dist/face-api.js",
"external": ["@tensorflow"]
},
{
"name": "faceapi/browser/esm/bundle",
"platform": "browser",
"format": "esm",
"sourcemap": true,
"input": "src/index.ts",
"output": "dist/face-api.esm.js",
"typings": "types/lib",
"typedoc": "typedoc",
"external": ["@tensorflow"]
}
]
},
"watch": {
"enabled": true,
"locations": [ "src/**" ]
},
"typescript": {
"allowJs": false
}
}

@@ -1,76 +0,0 @@
{
"globals": {},
"env": {
"browser": true,
"commonjs": true,
"node": true,
"es2020": true
},
"parser": "@typescript-eslint/parser",
"parserOptions": { "ecmaVersion": "latest" },
"plugins": [
"@typescript-eslint"
],
"extends": [
"eslint:recommended",
"plugin:import/errors",
"plugin:import/warnings",
"plugin:node/recommended",
"plugin:promise/recommended",
"plugin:@typescript-eslint/eslint-recommended",
"plugin:@typescript-eslint/recommended",
"airbnb-base"
],
"ignorePatterns": [ "node_modules", "types" ],
"rules": {
"@typescript-eslint/no-explicit-any": "off",
"@typescript-eslint/ban-types": "off",
"@typescript-eslint/ban-ts-comment": "off",
"@typescript-eslint/explicit-module-boundary-types": "off",
"@typescript-eslint/no-var-requires": "off",
"@typescript-eslint/no-empty-object-type": "off",
"@typescript-eslint/no-require-imports": "off",
"camelcase": "off",
"class-methods-use-this": "off",
"default-param-last": "off",
"dot-notation": "off",
"func-names": "off",
"guard-for-in": "off",
"import/extensions": "off",
"import/no-extraneous-dependencies": "off",
"import/no-named-as-default": "off",
"import/no-unresolved": "off",
"import/prefer-default-export": "off",
"lines-between-class-members": "off",
"max-len": [1, 275, 3],
"newline-per-chained-call": "off",
"no-async-promise-executor": "off",
"no-await-in-loop": "off",
"no-bitwise": "off",
"no-case-declarations":"off",
"no-continue": "off",
"no-loop-func": "off",
"no-mixed-operators": "off",
"no-param-reassign":"off",
"no-plusplus": "off",
"no-regex-spaces": "off",
"no-restricted-globals": "off",
"no-restricted-syntax": "off",
"no-return-assign": "off",
"no-underscore-dangle": "off",
"no-promise-executor-return": "off",
"node/no-missing-import": ["error", { "tryExtensions": [".js", ".json", ".ts"] }],
"node/no-unpublished-import": "off",
"node/no-unpublished-require": "off",
"node/no-unsupported-features/es-syntax": "off",
"no-lonely-if": "off",
"node/shebang": "off",
"object-curly-newline": "off",
"prefer-destructuring": "off",
"prefer-template":"off",
"promise/always-return": "off",
"promise/catch-or-return": "off",
"promise/no-nesting": "off",
"radix": "off"
}
}

.github/FUNDING.yml

@@ -1,13 +0,0 @@
# These are supported funding model platforms
github: [vladmandic]
patreon: # Replace with a single Patreon username
open_collective: # Replace with a single Open Collective username
ko_fi: # Replace with a single Ko-fi username
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
liberapay: # Replace with a single Liberapay username
issuehunt: # Replace with a single IssueHunt username
otechie: # Replace with a single Otechie username
lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']

.github/ISSUE_TEMPLATE/ISSUE.md

@@ -0,0 +1 @@
Please include output of `faceapi.version` object or specify details about your version and platform (OS, NodeJS version, Browser version).

@@ -1,28 +0,0 @@
---
name: Issue
about: Issue
title: ''
labels: ''
assignees: vladmandic
---
**Issue Description**
**Steps to Reproduce**
**Expected Behavior**
**Environment**
- Module version?
- Built-in demo or custom code?
- Type of module used (e.g. `js`, `esm`, `esm-nobundle`)?
- Browser or NodeJS and version (e.g. NodeJS 14.15 or Chrome 89)?
- OS and Hardware platform (e.g. Windows 10, Ubuntu Linux on x64, Android 10)?
- Packager (if any) (e.g. webpack, rollup, parcel, esbuild, etc.)?
**Additional**
- For installation or startup issues include your `package.json`
- For usage issues, it is recommended to post your code as [gist](https://gist.github.com/)

@@ -1,3 +0,0 @@
# Pull Request Template
<br>

@@ -1,67 +0,0 @@
# For most projects, this workflow file will not need changing; you simply need
# to commit it to your repository.
#
# You may wish to alter this file to override the set of languages analyzed,
# or to provide custom queries or build logic.
#
# ******** NOTE ********
# We have attempted to detect the languages in your repository. Please check
# the `language` matrix defined below to confirm you have the correct set of
# supported CodeQL languages.
#
name: "CodeQL"
on:
  push:
    branches: [ master ]
  pull_request:
    # The branches below must be a subset of the branches above
    branches: [ master ]
  schedule:
    - cron: '21 6 * * 0'
jobs:
  analyze:
    name: Analyze
    runs-on: ubuntu-latest
    strategy:
      fail-fast: false
      matrix:
        language: [ 'javascript' ]
        # CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python' ]
        # Learn more:
        # https://docs.github.com/en/free-pro-team@latest/github/finding-security-vulnerabilities-and-errors-in-your-code/configuring-code-scanning#changing-the-languages-that-are-analyzed
    steps:
      - name: Checkout repository
        uses: actions/checkout@v2
      # Initializes the CodeQL tools for scanning.
      - name: Initialize CodeQL
        uses: github/codeql-action/init@v1
        with:
          languages: ${{ matrix.language }}
        # If you wish to specify custom queries, you can do so here or in a config file.
        # By default, queries listed here will override any specified in a config file.
        # Prefix the list here with "+" to use these queries and those in the config file.
        # queries: ./path/to/local/query, your-org/your-repo/queries@main
      # Autobuild attempts to build any compiled languages (C/C++, C#, or Java).
      # If this step fails, then you should remove it and run the build manually (see below)
      - name: Autobuild
        uses: github/codeql-action/autobuild@v1
      # Command-line programs to run using the OS shell.
      # 📚 https://git.io/JvXDl
      # ✏️ If the Autobuild fails above, remove it and uncomment the following three lines
      # and modify them (or add more) to build your code if your project
      # uses a compiled language
      #- run: |
      #    make bootstrap
      #    make release
      - name: Perform CodeQL Analysis
        uses: github/codeql-action/analyze@v1

.gitignore

@@ -1,2 +1 @@
node_modules
pnpm-lock.yaml

.hintrc

@@ -1,13 +0,0 @@
{
"extends": [
"web-recommended"
],
"browserslist": [
"last 1 versions",
"not ie < 20"
],
"hints": {
"no-inline-styles": "off",
"meta-charset-utf-8": "off"
}
}

@@ -1,7 +0,0 @@
{
"MD012": false,
"MD013": false,
"MD033": false,
"MD036": false,
"MD041": false
}

@@ -1,5 +0,0 @@
node_modules
pnpm-lock.yaml
typedoc
test
types/lib

.npmrc

@@ -1,5 +0,0 @@
force=true
production=true
legacy-peer-deps=true
strict-peer-dependencies=false
node-options='--no-deprecation'

@@ -1,3 +0,0 @@
{
"typescript.tsdk": "node_modules/typescript/lib"
}

@@ -1,473 +0,0 @@
# @vladmandic/face-api
Version: **1.7.15**
Description: **FaceAPI: AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
Author: **Vladimir Mandic <mandic00@live.com>**
License: **MIT**
Repository: **<https://github.com/vladmandic/face-api>**
## Changelog
### **1.7.15** 2025/02/05 mandic00@live.com
### **origin/master** 2024/09/10 mandic00@live.com
### **1.7.14** 2024/09/10 mandic00@live.com
- rebuild
- merge pull request #188 from rebser/master
- fixing leaking eventhandlers when using htmlcanvaselement
- rebuild types
- rebuild
### **1.7.13** 2024/01/17 mandic00@live.com
- merge pull request #186 from khwalkowicz/master
- feat: enable noimplicitany
### **release: 1.7.12** 2023/06/12 mandic00@live.com
### **1.7.12** 2023/06/12 mandic00@live.com
### **1.7.11** 2023/05/08 mandic00@live.com
### **1.7.10** 2023/03/21 mandic00@live.com
- change typedefs
### **1.7.9** 2023/01/29 mandic00@live.com
### **1.7.8** 2023/01/06 mandic00@live.com
### **1.7.7** 2022/12/01 mandic00@live.com
### **1.7.6** 2022/10/18 mandic00@live.com
- fix face angles (yaw, pitch, & roll) accuracy (#130)
### **1.7.5** 2022/10/09 mandic00@live.com
- create funding.yml
- add node-wasm demo
### **1.7.4** 2022/09/25 mandic00@live.com
- improve face compare performance
### **1.7.3** 2022/08/24 mandic00@live.com
- refresh release
### **1.7.2** 2022/08/23 mandic00@live.com
- document and remove optional dependencies
### **release: 1.7.1** 2022/07/25 mandic00@live.com
### **1.7.1** 2022/07/25 mandic00@live.com
- refactor dependencies
- full rebuild
### **1.6.11** 2022/05/24 mandic00@live.com
### **1.6.10** 2022/05/24 mandic00@live.com
### **1.6.9** 2022/05/18 mandic00@live.com
### **1.6.8** 2022/05/09 mandic00@live.com
- exclude impossible detected face boxes
### **1.6.7** 2022/04/01 mandic00@live.com
- fixed typo error (#97)
### **1.6.6** 2022/03/04 mandic00@live.com
### **1.6.5** 2022/02/07 mandic00@live.com
### **1.6.4** 2022/01/14 mandic00@live.com
- add node with wasm build target
### **1.6.3** 2022/01/06 mandic00@live.com
### **1.6.2** 2022/01/01 mandic00@live.com
### **1.6.1** 2021/12/09 mandic00@live.com
- rebuild
- release preview
- switch to custom tfjs and new typedefs
- rebuild
### **1.5.8** 2021/11/30 mandic00@live.com
### **1.5.7** 2021/10/28 mandic00@live.com
### **1.5.6** 2021/10/22 mandic00@live.com
### **release: 1.5.5** 2021/10/19 mandic00@live.com
### **1.5.5** 2021/10/19 mandic00@live.com
- allow backend change in demo via url params
- add node-match demo
- fix face matcher
### **1.5.4** 2021/09/29 mandic00@live.com
### **1.5.3** 2021/09/16 mandic00@live.com
- simplify tfjs imports
- reduce bundle size
- enable webgl uniforms
### **1.5.2** 2021/09/10 mandic00@live.com
- redesign build platform
### **1.5.1** 2021/09/08 mandic00@live.com
### **1.4.2** 2021/08/31 mandic00@live.com
### **release: 1.4.1** 2021/07/29 mandic00@live.com
### **1.4.1** 2021/07/29 mandic00@live.com
### **release: 1.3.1** 2021/06/18 mandic00@live.com
### **1.3.1** 2021/06/08 mandic00@live.com
- fix face expression detection (#56)
- add buffertovideo
- fix git conflicts
- fix tsc error (#55)
- force typescript 4.2 due to typedoc incompatibility with ts 4.3
### **1.2.5** 2021/05/27 mandic00@live.com
- add buffertovideo and fetchvideo (#54)
### **1.2.4** 2021/05/18 mandic00@live.com
### **1.2.3** 2021/05/04 mandic00@live.com
### **update for tfjs 3.6.0** 2021/04/30 mandic00@live.com
### **1.2.2** 2021/04/30 mandic00@live.com
- add node-wasm demo
- accept uri as input to demo node and node-canvas
- major version full rebuild
### **1.2.1** 2021/04/22 mandic00@live.com
- add npmrc
- add canvas/image based demo to decode webp
### **1.1.12** 2021/04/13 mandic00@live.com
### **1.1.11** 2021/04/06 mandic00@live.com
- merge pull request #46 from mayankagarwals/demo_latencytest_fix
- fixed bug which led to latency not being measured and wrong output on console for demo
- add cdn links
### **1.1.10** 2021/04/04 mandic00@live.com
- added webhints
### **1.1.9** 2021/04/03 mandic00@live.com
- fix linting and tests
### **1.1.8** 2021/04/01 mandic00@live.com
### **1.1.7** 2021/03/31 mandic00@live.com
- enable minify
### **1.1.6** 2021/03/26 mandic00@live.com
### **1.1.5** 2021/03/23 mandic00@live.com
- add node-canvas demo
- refactoring
### **1.1.4** 2021/03/18 mandic00@live.com
### **1.1.3** 2021/03/16 mandic00@live.com
- fix for seedrandom
### **1.1.2** 2021/03/15 mandic00@live.com
- create templates
- create codeql-analysis.yml
### **1.1.1** 2021/03/14 mandic00@live.com
- full rebuild
- reformatted model manifests and weights
- create api specs
### **1.0.2** 2021/03/09 mandic00@live.com
### **release: 1.0.1** 2021/03/09 mandic00@live.com
### **1.0.1** 2021/03/09 mandic00@live.com
- add badges
- optimize for npm
- 0.30.6
- added typings for face angle
- disable landmark printing
- 0.30.5
- enabled live demo on gitpages
- 0.30.4
- added face angle calculations
- added documentation
- package update
- 0.30.3
- 0.30.2
- 0.30.1
- 0.13.3
- added note-cpu target
- merge pull request #39 from xemle/feature/node-cpu
- add node-cpu build for non supported systems of libtensorflow
- 0.13.2
- 0.13.1
- 0.12.10
- exception handling
- 0.12.9
- exception handling
- 0.12.8
- exception handling
### **0.12.7** 2021/02/17 mandic00@live.com
- 0.12.7
- 0.12.6
- 0.12.5
- 0.12.4
- 0.12.3
- 0.12.2
### **update for tfjs 3.0.0** 2021/01/29 mandic00@live.com
- 0.12.1
- rebuild
- 0.11.6
- add check for null face descriptor
- merge pull request #34 from patrickhulce/patch-1
- fix: return empty descriptor for zero-sized faces
- 0.11.5
- 0.11.4
- 0.11.3
- fix typo
- enable full minification
- 0.11.2
- full rebuild
- 0.11.1
- added live webcam demo
- 0.10.2
- ts linting
- version bump
- 0.10.1
- full re-lint and typings generation
- rebuild
### **0.9.5** 2020/12/19 mandic00@live.com
- added tsc build typings
### **0.9.4** 2020/12/15 mandic00@live.com
- package update
### **0.9.3** 2020/12/12 mandic00@live.com
- remove old demo
- merge branch 'master' of https://github.com/vladmandic/face-api
### **0.9.2** 2020/12/08 mandic00@live.com
- merge pull request #19 from meeki007/patch-3
- remove http reff
- fixed typos
### **0.9.1** 2020/12/02 mandic00@live.com
- redesigned tfjs bundling and build process
- push
- merge pull request #17 from meeki007/patch-2
- merge pull request #16 from meeki007/patch-1
- added link to documentation for js.tensorflow 2.7.0
- add comments and fix typo
### **0.8.9** 2020/11/25 mandic00@live.com
- removed node-fetch dependency
### **0.8.8** 2020/11/03 mandic00@live.com
### **0.8.7** 2020/11/03 mandic00@live.com
- removed type from package.json and added nodejs example
### **0.8.6** 2020/10/29 mandic00@live.com
### **0.8.5** 2020/10/27 mandic00@live.com
### **0.8.4** 2020/10/27 mandic00@live.com
- fix webpack compatibility issue
### **0.8.3** 2020/10/25 mandic00@live.com
### **0.8.2** 2020/10/25 mandic00@live.com
- fix for wasm compatibility
### **0.8.1** 2020/10/15 mandic00@live.com
- added cjs builds
### **0.7.4** 2020/10/14 mandic00@live.com
- added nobundle
### **0.7.3** 2020/10/13 mandic00@live.com
### **0.7.2** 2020/10/13 mandic00@live.com
### **0.7.1** 2020/10/13 mandic00@live.com
- switched to monolithic build
### **0.6.3** 2020/10/12 mandic00@live.com
### **0.6.2** 2020/10/11 mandic00@live.com
### **0.6.1** 2020/10/11 mandic00@live.com
- major update
- tfjs 2.6.0
### **0.5.3** 2020/09/18 cyan00@gmail.com
### **0.5.2** 2020/09/16 cyan00@gmail.com
- added build for node
- upgrade to tfjs@2.4.0 and ts-node@9.0.0
- create issue.md
- added issue template
- added faceapi.version object
### **0.5.1** 2020/09/08 cyan00@gmail.com
### **0.4.6** 2020/09/08 cyan00@gmail.com
- added test fot @tfjs and backends loaded
### **0.4.5** 2020/08/31 cyan00@gmail.com
- adding build
### **0.4.4** 2020/08/30 cyan00@gmail.com
- change build process
### **0.4.3** 2020/08/29 cyan00@gmail.com
- fix node build error
### **0.4.2** 2020/08/29 cyan00@gmail.com
### **0.4.1** 2020/08/27 cyan00@gmail.com
### **0.3.9** 2020/08/27 cyan00@gmail.com
- added example
### **0.3.8** 2020/08/26 cyan00@gmail.com
- re-added ssd_mobilenet
### **0.3.7** 2020/08/22 cyan00@gmail.com
### **0.3.6** 2020/08/21 cyan00@gmail.com
### **0.3.5** 2020/08/19 cyan00@gmail.com
### **0.3.4** 2020/08/19 cyan00@gmail.com
- switch to commonjs and es2018 for compatibility
### **0.3.3** 2020/08/19 cyan00@gmail.com
### **0.3.2** 2020/08/18 cyan00@gmail.com
### **0.3.1** 2020/08/18 cyan00@gmail.com
- uodated build script
- npm publish
- added pre-compiled build
- added pre-bundled dist
- removed unnecessary weights
- initial commit

@@ -1,24 +0,0 @@
# Code of Conduct
Use your best judgement
If it will possibly make others uncomfortable, do not post it
- Be respectful
Disagreement is not an opportunity to attack someone else's thoughts or opinions
Although views may differ, remember to approach every situation with patience and care
- Be considerate
Think about how your contribution will affect others in the community
- Be open minded
Embrace new people and new ideas. Our community is continually evolving and we welcome positive change
- Be mindful of your language
Any of the following behavior is unacceptable:
- Offensive comments of any kind
- Threats or intimidation
- Sexually explicit material
- Or any other kinds of harassment
If you believe someone is violating the code of conduct, we ask that you report it
Participants asked to stop any harassing behavior are expected to comply immediately

@@ -1,17 +0,0 @@
# Contributing Guidelines
Pull requests from everyone are welcome
Procedure for contributing:
- Create a fork of the repository on github
In the top right corner of the GitHub page, select "Fork"
- Clone your forked repository to your local system
`git clone https://github.com/<your-username>/<your-fork>`
- Make your changes
- Test your changes against code guidelines
`npm run lint`
- Push changes to your fork
- Submit a PR (pull request)
Your pull request will be reviewed and, pending review results, merged into the main branch

@@ -1,6 +1,6 @@
MIT License
Copyright (c) Vladimir Mandic
Copyright (c) 2018 Vincent Mühler
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal

README.md

@@ -1,519 +1,137 @@
![Git Version](https://img.shields.io/github/package-json/v/vladmandic/face-api?style=flat-square&svg=true&label=git)
![NPM Version](https://img.shields.io/npm/v/@vladmandic/face-api.png?style=flat-square)
![Last Commit](https://img.shields.io/github/last-commit/vladmandic/face-api?style=flat-square?svg=true)
![License](https://img.shields.io/github/license/vladmandic/face-api?style=flat-square?svg=true)
![GitHub Status Checks](https://img.shields.io/github/checks-status/vladmandic/face-api/master?style=flat-square?svg=true)
![Vulnerabilities](https://img.shields.io/snyk/vulnerabilities/github/vladmandic/face-api?style=flat-square?svg=true)
# FaceAPI
**AI-powered Face Detection & Rotation Tracking, Face Description & Recognition, Age & Gender & Emotion Prediction for Browser and NodeJS using TensorFlow/JS**
## Note
<br>
This is updated **face-api.js** with the latest available TensorFlow/JS, as the original face-api.js is not compatible with **tfjs 2.0+**.
**Live Demo**: <https://vladmandic.github.io/face-api/demo/webcam.html>
Forked from **face-api.js** version **0.22.2** released on March 22nd, 2020
<br>
- <https://github.com/justadudewhohacks/face-api.js>
- <https://www.npmjs.com/package/face-api.js>
## Additional Documentation
- [**Tutorial**](TUTORIAL.md)
- [**TypeDoc API Specification**](https://vladmandic.github.io/face-api/typedoc/index.html)
## Differences
- Removed tests, docs, examples
- Updated all package dependencies
- Compatible with TensorFlow/JS 2.0+
- Updated type casting for TypeScript type checking
- Removed unnecessary package dependencies (karma, jasmine, etc.)
- TypeScript build process now targets ES2018 instead of dual ES5/ES6
- Browser bundle process uses ESBuild instead of Rollup
- New TensorFlow/JS dependencies since backends were removed from @tensorflow/tfjs-core
- Updated mobileNetv1 model due to batchNorm() dependency
- Fully tree shakable when imported as an ESM module
- Added `version` class that returns JSON object with version of FaceAPI as well as linked TFJS
- Added calls for `setPlatform` to automatically prepare TFJS in browser
- Removed following models as they are either obsolete or non-functional with tfjs 2.0+
- mtcnn: Mostly obsolete
- tinyYolov2: Non-functional since weights are missing
<br><hr><br>
## Examples
<br>
### Browser
Browser example that uses static images and showcases both models
as well as all of the extensions is included in `/demo/index.html`
Example can be accessed directly via GitHub Pages:
<https://vladmandic.github.io/face-api/demo/index.html>
Browser example that uses live webcam is included in `/demo/webcam.html`
Example can be accessed directly via GitHub Pages:
<https://vladmandic.github.io/face-api/demo/webcam.html>
<br>
**Demo using FaceAPI to process images**
*Note: Photos shown below are taken by me*
![screenshot](demo/screenshot-images.png)
**Demo using FaceAPI to process live webcam**
![screenshot](demo/screenshot-webcam.png)
<br>
### NodeJS
NodeJS examples are:
- `/demo/node-simple.js`:
Simplest possible NodeJS demo for FaceAPI in under 30 lines of JavaScript code
- `/demo/node.js`:
Using `TFJS` native methods to load images without external dependencies
- `/demo/node-canvas.js` and `/demo/node-image.js`:
Using external `canvas` module to load images
Which also allows for image drawing and saving inside `NodeJS` environment
- `/demo/node-match.js`:
Simple demo that compares face similarity from a given image
to a second image or list of images in a folder
- `/demo/node-multiprocess.js`:
Multiprocessing showcase that uses pool of worker processes
(`node-multiprocess-worker.js`)
Main starts a fixed pool of worker processes with each worker having
its own instance of `FaceAPI`
Workers communicate with main when they are ready and main dispatches
jobs to each ready worker until the job queue is empty
```json
2021-03-14 08:42:03 INFO: @vladmandic/face-api version 1.0.2
2021-03-14 08:42:03 INFO: User: vlado Platform: linux Arch: x64 Node: v15.7.0
2021-03-14 08:42:03 INFO: FaceAPI multi-process test
2021-03-14 08:42:03 STATE: Main: started worker: 1888019
2021-03-14 08:42:03 STATE: Main: started worker: 1888025
2021-03-14 08:42:04 STATE: Worker: PID: 1888025 TensorFlow/JS 3.3.0 FaceAPI 1.0.2 Backend: tensorflow
2021-03-14 08:42:04 STATE: Worker: PID: 1888019 TensorFlow/JS 3.3.0 FaceAPI 1.0.2 Backend: tensorflow
2021-03-14 08:42:04 STATE: Main: dispatching to worker: 1888019
2021-03-14 08:42:04 STATE: Main: dispatching to worker: 1888025
2021-03-14 08:42:04 DATA: Worker received message: 1888019 { image: 'demo/sample1.jpg' }
2021-03-14 08:42:04 DATA: Worker received message: 1888025 { image: 'demo/sample2.jpg' }
2021-03-14 08:42:06 DATA: Main: worker finished: 1888025 detected faces: 3
2021-03-14 08:42:06 STATE: Main: dispatching to worker: 1888025
2021-03-14 08:42:06 DATA: Worker received message: 1888025 { image: 'demo/sample3.jpg' }
2021-03-14 08:42:06 DATA: Main: worker finished: 1888019 detected faces: 3
2021-03-14 08:42:06 STATE: Main: dispatching to worker: 1888019
2021-03-14 08:42:06 DATA: Worker received message: 1888019 { image: 'demo/sample4.jpg' }
2021-03-14 08:42:07 DATA: Main: worker finished: 1888025 detected faces: 3
2021-03-14 08:42:07 STATE: Main: dispatching to worker: 1888025
2021-03-14 08:42:07 DATA: Worker received message: 1888025 { image: 'demo/sample5.jpg' }
2021-03-14 08:42:08 DATA: Main: worker finished: 1888019 detected faces: 4
2021-03-14 08:42:08 STATE: Main: dispatching to worker: 1888019
2021-03-14 08:42:08 DATA: Worker received message: 1888019 { image: 'demo/sample6.jpg' }
2021-03-14 08:42:09 DATA: Main: worker finished: 1888025 detected faces: 5
2021-03-14 08:42:09 STATE: Main: worker exit: 1888025 0
2021-03-14 08:42:09 DATA: Main: worker finished: 1888019 detected faces: 4
2021-03-14 08:42:09 INFO: Processed 15 images in 5944 ms
2021-03-14 08:42:09 STATE: Main: worker exit: 1888019 0
```
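The face-similarity comparison performed by `node-match.js` comes down to comparing face descriptors; below is a minimal sketch under the assumption that models are already loaded and inputs are already decoded (`faceapi.euclideanDistance` is part of the public API; everything else here is illustrative):

```js
// compare two inputs by the euclidean distance between their 128-element face descriptors
// lower distance means more similar; ~0.6 is a commonly used match threshold (assumption)
async function compareFaces(input1, input2) {
  const face1 = await faceapi.detectSingleFace(input1).withFaceLandmarks().withFaceDescriptor();
  const face2 = await faceapi.detectSingleFace(input2).withFaceLandmarks().withFaceDescriptor();
  if (!face1 || !face2) return Number.NaN; // no face detected in one of the inputs
  return faceapi.euclideanDistance(face1.descriptor, face2.descriptor);
}
```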
### NodeJS Notes
- Supported NodeJS versions are **14** up to **22**
NodeJS versions **23** and higher are not supported due to incompatibility with TensorFlow/JS
- `@tensorflow/tfjs-node` or `@tensorflow/tfjs-node-gpu`
must be installed before using any **NodeJS** examples
<br><hr><br>
## Quick Start
Simply include the latest version of `FaceAPI` directly from a CDN in your HTML
(pick one, `jsdelivr` or `unpkg`)
```html
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/face-api/dist/face-api.js"></script>
<script src="https://unpkg.dev/@vladmandic/face-api/dist/face-api.js"></script>
```
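Once the script is loaded, a minimal sketch looks like this (assuming the pretrained weights from this repository's `model` folder are served under `/model`, and `myImg` is an image element on the page — both hypothetical):

```js
// load one detector model, then detect all faces in an image element
async function run() {
  await faceapi.nets.ssdMobilenetv1.loadFromUri('/model'); // hypothetical model location
  const detections = await faceapi.detectAllFaces(document.getElementById('myImg'));
  console.log(detections);
}
run();
```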
Due to reduced code and a changed build process, the resulting bundle is over **5x smaller** than the original!
## Installation
`FaceAPI` ships with several pre-built versions of the library:
- `dist/face-api.js`: IIFE format for client-side Browser execution
*with* TFJS pre-bundled
- `dist/face-api.esm.js`: ESM format for client-side Browser execution
*with* TFJS pre-bundled
- `dist/face-api.esm-nobundle.js`: ESM format for client-side Browser execution
*without* TFJS pre-bundled
- `dist/face-api.node.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled
- `dist/face-api.node-gpu.js`: CommonJS format for server-side NodeJS execution
*without* TFJS pre-bundled and optimized for CUDA GPU acceleration
Defaults are:
```json
{
"main": "dist/face-api.node-js",
"module": "dist/face-api.esm.js",
"browser": "dist/face-api.esm.js",
}
```
Bundled `TFJS` can be used directly via export: `faceapi.tf`
The reason for the additional `nobundle` version is to let you
include a specific version of TFJS rather than rely on the pre-packaged one
`FaceAPI` is compatible with TFJS 2.0+ and TFJS 3.0+
All versions include `sourcemap`
<br><hr><br>
There are several ways to use FaceAPI:
### 1. IIFE script
*Size: 936KB minified*
*Recommended for quick tests and backward compatibility with older Browsers that do not support ESM such as IE*
This is the simplest way to use `FaceAPI` within a Browser
Simply download `dist/face-api.js`, include it in your `HTML` file & it's ready to use:
```html
<script src="https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.min.js"></script>
<script src="dist/face-api.js"><script>
```
```
Or skip the download and include it directly from a CDN or from GitHub Pages (pick one):
```html
<script src="https://cdn.jsdelivr.net/npm/@vladmandic/face-api/dist/face-api.js"></script>
<script src="https://vladmandic.github.io/face-api/dist/face-api.js"></script>
```
IIFE script bundles TFJS and auto-registers global namespace `faceapi` within the Window object, which can be accessed directly from a `<script>` tag or from your JS file.
And if you want to access `TensorFlow/JS` classes directly, they are exported as `faceapi.tf`
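For example, a small sketch (the `version` object is described in the notes below; `tf.ready()` is a standard TFJS call):

```js
// faceapi is available on the global (window) namespace once the IIFE script is loaded
console.log(faceapi.version); // JSON object with the FaceAPI version and the linked TFJS version
faceapi.tf.ready().then(() => console.log('TFJS ready')); // bundled TFJS accessed via faceapi.tf
```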
### 2. ESM module
*Size: 164KB non-minified*
*Recommended for usage within Browser*
#### 2.1. Direct Import
To use ESM import directly in a Browser, you must import your script (e.g. `index.js`) with a `type="module"`
```html
<script src="./index.js" type="module">
```
and then in your `index.js`
Note that the `nobundle` version does NOT pre-package `TFJS`, so you'll need to include it before you import `FaceAPI`
You can use any version of `TFJS` 2.0+
```js
import * as tf from 'https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.min.js'; // load directly from CDN
import * as faceapi from 'dist/face-api.esm.js';
```
*Experimental*:
You could use the same syntax within your main `JS` file if it's imported with `<script type="module">`
```html
<script src="tf.min.js">
<script src="./index.js" type="module">
```
and then in `index.js`
```js
import * as tf from 'https://cdnjs.cloudflare.com/ajax/libs/tensorflow/2.6.0/tf.min.js'; // load directly from CDN
import * as faceapi from 'dist/face-api.esm.js';
```
#### 2.2. With Bundler
If you're using a bundler *(such as rollup, webpack, or esbuild)* to package your client application, you can import the ESM version of FaceAPI which supports full tree shaking
Install with:
```shell
npm install @tensorflow/tfjs @vladmandic/face-api
```
and package your application using your bundler, in which case you do not need to import the script as a module - that depends on your bundler configuration
And then use with:
```js
import * as tf from '@tensorflow/tfjs';
import * as faceapi from '@vladmandic/face-api';
```
or if your bundler doesn't recognize `recommended` type, force usage with:
```js
import * as faceapi from '@vladmandic/face-api/dist/face-api.esm.js';
```
or to use non-bundled version
```js
import * as tf from '@tensorflow/tfjs';
import * as faceapi from '@vladmandic/face-api/dist/face-api.esm-nobundle.js';
```
<br>
### 3. NPM module
*Size: 45,104KB unpacked (including sources and pre-trained model weights)*
#### 3.1. Import CommonJS
*Recommended for NodeJS projects*
*Note: FaceAPI for NodeJS does not bundle TFJS due to binary dependencies that are installed during TFJS installation*
Install with:
```shell
npm install @tensorflow/tfjs-node
npm install @vladmandic/face-api
```
And then use with:
```js
const tf = require('@tensorflow/tfjs-node')
const faceapi = require('@vladmandic/face-api');
```
If you want to force CommonJS module instead of relying on `recommended` field:
```js
const faceapi = require('@vladmandic/face-api/dist/face-api.node.js');
```
If you want GPU-accelerated execution in NodeJS, you must have CUDA libraries already installed and working
Then install the appropriate version of `FaceAPI`:
```shell
npm install @tensorflow/tfjs-node-gpu
npm install @vladmandic/face-api
```
And then use with:
```js
const tf = require('@tensorflow/tfjs-node-gpu')
const faceapi = require('@vladmandic/face-api/dist/face-api.node-gpu.js'); // this loads face-api version with correct bindings for tfjs-node-gpu
```
If you want to use `FaceAPI` in NodeJS on platforms where **tensorflow** binary libraries are not supported, you can use the NodeJS **WASM** backend.
```shell
npm install @tensorflow/tfjs
npm install @tensorflow/tfjs-backend-wasm
npm install @vladmandic/face-api
```
And then use with:
```js
const tf = require('@tensorflow/tfjs');
const wasm = require('@tensorflow/tfjs-backend-wasm');
const faceapi = require('@vladmandic/face-api/dist/face-api.node-wasm.js'); // use this when using face-api in dev mode
wasm.setWasmPaths('https://cdn.jsdelivr.net/npm/@tensorflow/tfjs-backend-wasm/dist/');
await tf.setBackend('wasm');
await tf.ready();
...
```
If you want to use graphical functions inside NodeJS,
you must provide an appropriate graphical library, as
NodeJS does not include an implementation for DOM elements
such as HTMLImageElement or HTMLCanvasElement:
Install `Canvas` for NodeJS:
```shell
npm install canvas
```
Patch NodeJS environment to use newly installed `Canvas` library:
```js
const canvas = require('canvas');
const faceapi = require('@vladmandic/face-api');
const { Canvas, Image, ImageData } = canvas
faceapi.env.monkeyPatch({ Canvas, Image, ImageData })
```
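Putting the pieces together, a minimal NodeJS sketch (assuming the pretrained weights reside in `./model` as noted below, and `sample.jpg` is a hypothetical local test image):

```js
const canvas = require('canvas');
const faceapi = require('@vladmandic/face-api');

// patch the NodeJS environment with DOM element implementations from canvas
const { Canvas, Image, ImageData } = canvas;
faceapi.env.monkeyPatch({ Canvas, Image, ImageData });

async function main() {
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('./model'); // load weights directly from disk
  const img = await canvas.loadImage('sample.jpg'); // hypothetical local test image
  const detections = await faceapi.detectAllFaces(img, new faceapi.SsdMobilenetv1Options());
  console.log(detections);
}
main();
```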
<br><hr><br>
## Weights
Pretrained models and their weights are included in `./model`.
<br><hr><br>
## Test & Dev Web Server
To install development dependencies, use `npm install --production=false`
The built-in test & dev web server can be started using
```shell
npm run dev
```
By default it starts an HTTP server on port 8000 and an HTTPS server on port 8001, which can be accessed as:
- <https://localhost:8001/demo/index.html>
- <https://localhost:8001/demo/webcam.html>
```js
2022-01-14 09:56:19 INFO: @vladmandic/face-api version 1.6.4
2022-01-14 09:56:19 INFO: User: vlado Platform: linux Arch: x64 Node: v17.2.0
2022-01-14 09:56:19 INFO: Application: { name: '@vladmandic/face-api', version: '1.6.4' }
2022-01-14 09:56:19 INFO: Environment: { profile: 'development', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2022-01-14 09:56:19 INFO: Toolchain: { build: '0.6.7', esbuild: '0.14.11', typescript: '4.5.4', typedoc: '0.22.10', eslint: '8.6.0' }
2022-01-14 09:56:19 INFO: Build: { profile: 'development', steps: [ 'serve', 'watch', 'compile' ] }
2022-01-14 09:56:19 STATE: WebServer: { ssl: false, port: 8000, root: '.' }
2022-01-14 09:56:19 STATE: WebServer: { ssl: true, port: 8001, root: '.', sslKey: 'build/cert/https.key', sslCrt: 'build/cert/https.crt' }
2022-01-14 09:56:19 STATE: Watch: { locations: [ 'src/**', 'README.md', 'src/**', 'src/**' ] }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 1276 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234787, outputBytes: 175203 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 1296 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234807, outputBytes: 175219 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 1367 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234878, outputBytes: 175294 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 1662 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 2172, outputBytes: 811 }
2022-01-14 09:56:19 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234322, outputBytes: 169437 }
2022-01-14 09:56:19 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 2172, outputBytes: 2444105 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 2677616, outputBytes: 1252572 }
2022-01-14 09:56:20 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 2677616, outputBytes: 2435063 }
2022-01-14 09:56:20 INFO: Listening...
...
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/html', size: 1047, url: '/', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 6919, url: '/index.js', remote: '::1' }
2022-01-14 09:56:46 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'text/javascript', size: 2435063, url: '/dist/face-api.esm.js', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 4125244, url: '/dist/face-api.esm.js.map', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 3219, url: '/model/tiny_face_detector_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 193321, url: '/model/tiny_face_detector_model.bin', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 28233, url: '/model/ssd_mobilenetv1_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:47 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 5616957, url: '/model/ssd_mobilenetv1_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8392, url: '/model/age_gender_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 429708, url: '/model/age_gender_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 8485, url: '/model/face_landmark_68_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 356840, url: '/model/face_landmark_68_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 19615, url: '/model/face_recognition_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 6444032, url: '/model/face_recognition_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/json', size: 6980, url: '/model/face_expression_model-weights_manifest.json', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'application/octet-stream', size: 329468, url: '/model/face_expression_model.bin', remote: '::1' }
2022-01-14 09:56:48 DATA: HTTPS: { method: 'GET', ver: '2.0', status: 200, mime: 'image/jpeg', size: 144516, url: '/sample1.jpg', remote: '::1' }
```
<br><hr><br>
## Build
If you want to do a full rebuild, either download the npm module
```shell
npm install @vladmandic/face-api
cd node_modules/@vladmandic/face-api
```
or clone the git project
```shell
git clone https://github.com/vladmandic/face-api
cd face-api
```
Then install all dependencies and run rebuild:
```shell
npm install --production=false
npm run build
```
Build process uses `@vladmandic/build` module that creates an optimized build for each target
It compiles everything in `./src` and creates both ESM (standard) and IIFE (minified) bundles as well as sourcemaps in `./dist`:
```js
> @vladmandic/face-api@1.7.1 build /home/vlado/dev/face-api
> node build.js
2022-07-25 08:21:05 INFO: Application: { name: '@vladmandic/face-api', version: '1.7.1' }
2022-07-25 08:21:05 INFO: Environment: { profile: 'production', config: '.build.json', package: 'package.json', tsconfig: true, eslintrc: true, git: true }
2022-07-25 08:21:05 INFO: Toolchain: { build: '0.7.7', esbuild: '0.14.50', typescript: '4.7.4', typedoc: '0.23.9', eslint: '8.20.0' }
2022-07-25 08:21:05 INFO: Build: { profile: 'production', steps: [ 'clean', 'compile', 'typings', 'typedoc', 'lint', 'changelog' ] }
2022-07-25 08:21:05 STATE: Clean: { locations: [ 'dist/*', 'typedoc/*', 'types/lib/src' ] }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/cpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 143, outputBytes: 614 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/cpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node.js', files: 162, inputBytes: 234137, outputBytes: 85701 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/gpu', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-gpu.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 147, outputBytes: 618 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/gpu', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-gpu.js', files: 162, inputBytes: 234141, outputBytes: 85705 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/node/wasm', format: 'cjs', platform: 'node', input: 'src/tfjs/tf-node-wasm.ts', output: 'dist/tfjs.esm.js', files: 1, inputBytes: 185, outputBytes: 670 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/node/wasm', format: 'cjs', platform: 'node', input: 'src/index.ts', output: 'dist/face-api.node-wasm.js', files: 162, inputBytes: 234193, outputBytes: 85755 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/tf-version', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-version.ts', output: 'dist/tfjs.version.js', files: 1, inputBytes: 1063, outputBytes: 400 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 2, inputBytes: 910, outputBytes: 527 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/nobundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm-nobundle.js', files: 162, inputBytes: 234050, outputBytes: 82787 }
2022-07-25 08:21:05 STATE: Compile: { name: 'tfjs/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/tfjs/tf-browser.ts', output: 'dist/tfjs.esm.js', files: 11, inputBytes: 910, outputBytes: 1184871 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/iife/bundle', format: 'iife', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.js', files: 162, inputBytes: 1418394, outputBytes: 1264631 }
2022-07-25 08:21:05 STATE: Compile: { name: 'faceapi/browser/esm/bundle', format: 'esm', platform: 'browser', input: 'src/index.ts', output: 'dist/face-api.esm.js', files: 162, inputBytes: 1418394, outputBytes: 1264150 }
2022-07-25 08:21:07 STATE: Typings: { input: 'src/index.ts', output: 'types/lib', files: 93 }
2022-07-25 08:21:09 STATE: TypeDoc: { input: 'src/index.ts', output: 'typedoc', objects: 154, generated: true }
2022-07-25 08:21:13 STATE: Lint: { locations: [ 'src/' ], files: 174, errors: 0, warnings: 0 }
2022-07-25 08:21:14 STATE: ChangeLog: { repository: 'https://github.com/vladmandic/face-api', branch: 'master', output: 'CHANGELOG.md' }
2022-07-25 08:21:14 INFO: Done...
2022-07-25 08:21:14 STATE: Copy: { input: 'types/lib/dist/tfjs.esm.d.ts' }
2022-07-25 08:21:15 STATE: API-Extractor: { succeeeded: true, errors: 0, warnings: 417 }
2022-07-25 08:21:15 INFO: FaceAPI Build complete...
```
## Documentation
For documentation refer to the original project at <https://github.com/justadudewhohacks/face-api.js>
<br><hr><br>
## Face Mesh
`FaceAPI` landmark model returns a 68-point face mesh as detailed in the image below:
![facemesh](demo/facemesh.png)
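For example, a small sketch of reading the mesh points from a detection result (the region getters such as `getNose()` come from the original face-api.js API):

```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks();
if (result) {
  const points = result.landmarks.positions; // all 68 mesh points as { x, y }
  const nose = result.landmarks.getNose();   // points belonging to a specific face region
  console.log(points.length, nose);
}
```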
<br><hr><br>
## Note
This is updated **face-api.js** with the latest available TensorFlow/JS, as the original is not compatible with **tfjs >=2.0**.
Forked from [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**, which was released on March 22nd, 2020
*Why?* I needed a FaceAPI that does not cause version conflicts with newer versions of TensorFlow
And since the original FaceAPI was open-source, I've released this version as well
Changes ended up being too large for a simple pull request, so it became a full-fledged version of its own
Plus many features were added since the original inception
Although a lot of work has gone into this version of `FaceAPI` and it will continue to be maintained,
at this time it is completely superseded by my newer library `Human`, which covers the same use cases,
but extends them with newer AI models, additional detection details, compatibility with the latest web standards and more
- [Human NPM](https://www.npmjs.com/package/@vladmandic/human)
- [Human Git Repository](https://github.com/vladmandic/human)
<br>
## Differences
Compared to [face-api.js](https://github.com/justadudewhohacks/face-api.js) version **0.22.2**:
- Compatible with `TensorFlow/JS 2.0+, 3.0+ and 4.0+`
Currently using **`TensorFlow/JS` 4.16**
Original `face-api.js` is based on `TFJS` **1.7.4**
- Compatible with `WebGL`, `CPU` and `WASM` TFJS Browser backends
- Compatible with both `tfjs-node` and `tfjs-node-gpu` TFJS NodeJS backends
- Updated all type castings for TypeScript type checking to `TypeScript 5.3`
- Switched bundling from `UMD` to `ESM` + `CommonJS` with fallback to `IIFE`
Resulting code is optimized per-platform instead of being universal
Fully tree shakable when imported as an `ESM` module
Browser bundle process uses `ESBuild` instead of `Rollup`
- Added separate `face-api` versions with `tfjs` pre-bundled and without `tfjs`
When using `-nobundle` version, user can load any version of `tfjs` manually
- Typescript build process now targets `ES2018` instead of dual `ES5`/`ES6`
Resulting code is clean ES2018 JavaScript without polyfills
- Removed old tests, docs, examples
- Removed old package dependencies (`karma`, `jasmine`, `babel`, etc.)
- Updated all package dependencies
- Updated TensorFlow/JS dependencies since backends were removed from `@tensorflow/tfjs-core`
- Updated `mobileNetv1` model due to `batchNorm()` dependency
- Added `version` class that returns JSON object with version of FaceAPI as well as linked TFJS
- Added test/dev built-in HTTP & HTTPS Web server
- Removed `mtcnn` and `tinyYolov2` models as they were non-functional in latest public version of `FaceAPI`
Which means valid models are **tinyFaceDetector** and **mobileNetv1**
*If there is a demand, I can re-implement them back.*
- Added `face angle` calculations that return `roll`, `yaw` and `pitch` (see the sketch after this list)
- Added `typedoc` automatic API specification generation during build
- Added `changelog` automatic generation during build
- New process to generate **TypeDocs** bundle using API-Extractor
<br>
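A hypothetical access pattern for the face angle values mentioned above (the exact property layout is an assumption; consult the TypeDoc API specification for the authoritative typings):

```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks();
if (result) {
  // assumption: angle values are exposed on the combined detection result
  const { roll, pitch, yaw } = result.angle;
  console.log({ roll, pitch, yaw });
}
```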
## Credits
- Original project: [face-api.js](https://github.com/justadudewhohacks/face-api.js)
- Original model weights: [face-api.js-models](https://github.com/justadudewhohacks/face-api.js-models)
- ML API Documentation: [Tensorflow/JS](https://js.tensorflow.org/api/latest/)
<br>
![Stars](https://img.shields.io/github/stars/vladmandic/face-api?style=flat-square?svg=true)
![Forks](https://badgen.net/github/forks/vladmandic/face-api)
![Code Size](https://img.shields.io/github/languages/code-size/vladmandic/face-api?style=flat-square?svg=true)
![CDN](https://data.jsdelivr.com/v1/package/npm/@vladmandic/face-api/badge)<br>
![Downloads](https://img.shields.io/npm/dw/@vladmandic/face-api.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dm/@vladmandic/face-api.png?style=flat-square)
![Downloads](https://img.shields.io/npm/dy/@vladmandic/face-api.png?style=flat-square)

@@ -1,5 +0,0 @@
# Security Policy
All issues are tracked publicly on GitHub
The entire code base and included dependencies are automatically scanned against known security vulnerabilities

@@ -1,3 +0,0 @@
# To-do List for FaceAPI
N/A

@@ -1,747 +0,0 @@
# FaceAPI Tutorial
## Features
* Face Recognition
* Face Landmark Detection
* Face Expression Recognition
* Age Estimation & Gender Recognition
<br>
## Table of Contents
* **[Usage](#getting-started)**
* **[Loading the Models](#getting-started-loading-models)**
* **[High Level API](#high-level-api)**
* **[Displaying Detection Results](#getting-started-displaying-detection-results)**
* **[Face Detection Options](#getting-started-face-detection-options)**
* **[Utility Classes](#getting-started-utility-classes)**
* **[Other Useful Utility](#other-useful-utility)**
* **[Available Models](#models)**
* **[Face Detection](#models-face-detection)**
* **[Face Landmark Detection](#models-face-landmark-detection)**
* **[Face Recognition](#models-face-recognition)**
* **[Face Expression Recognition](#models-face-expression-recognition)**
* **[Age Estimation and Gender Recognition](#models-age-and-gender-recognition)**
* **[API Documentation](https://justadudewhohacks.github.io/face-api.js/docs/globals.html)**
<br><hr><br>
<a name="getting-started"></a>
## Getting Started
<a name="getting-started-loading-models"></a>
### Loading the Models
All global neural network instances are exported via faceapi.nets:
```js
console.log(faceapi.nets)
// ageGenderNet
// faceExpressionNet
// faceLandmark68Net
// faceLandmark68TinyNet
// faceRecognitionNet
// ssdMobilenetv1
// tinyFaceDetector
// tinyYolov2
```
To load a model, you have to provide the corresponding manifest.json file as well as the model weight files (shards) as assets. Simply copy them to your public or assets folder. The manifest.json and shard files of a model have to be located in the same directory / accessible under the same route.
Assuming the models reside in **public/models**:
```js
await faceapi.nets.ssdMobilenetv1.loadFromUri('/models')
// accordingly for the other models:
// await faceapi.nets.faceLandmark68Net.loadFromUri('/models')
// await faceapi.nets.faceRecognitionNet.loadFromUri('/models')
// ...
```
In a nodejs environment you can furthermore load the models directly from disk:
```js
await faceapi.nets.ssdMobilenetv1.loadFromDisk('./models')
```
You can also load the model from a tf.NamedTensorMap:
```js
await faceapi.nets.ssdMobilenetv1.loadFromWeightMap(weightMap)
```
Alternatively, you can also create your own instances of the neural nets:
```js
const net = new faceapi.SsdMobilenetv1()
await net.loadFromUri('/models')
```
You can also load the weights as a Float32Array (in case you want to use the uncompressed models):
```js
// using fetch
net.load(await faceapi.fetchNetWeights('/models/face_detection_model.weights'))
// using axios
const res = await axios.get('/models/face_detection_model.weights', { responseType: 'arraybuffer' })
const weights = new Float32Array(res.data)
net.load(weights)
```
<a name="getting-high-level-api"></a>
### High Level API
In the following **input** can be an HTML img, video or canvas element or the id of that element.
``` html
<img id="myImg" src="images/example.png" />
<video id="myVideo" src="media/example.mp4" />
<canvas id="myCanvas" />
```
```js
const input = document.getElementById('myImg')
// const input = document.getElementById('myVideo')
// const input = document.getElementById('myCanvas')
// or simply:
// const input = 'myImg'
```
#### Detecting Faces
Detect all faces in an image. Returns **Array<[FaceDetection](#interface-face-detection)>**:
```js
const detections = await faceapi.detectAllFaces(input)
```
Detect the face with the highest confidence score in an image. Returns **[FaceDetection](#interface-face-detection) | undefined**:
```js
const detection = await faceapi.detectSingleFace(input)
```
By default **detectAllFaces** and **detectSingleFace** utilize the SSD Mobilenet V1 Face Detector. You can specify the face detector by passing the corresponding options object:
```js
const detections1 = await faceapi.detectAllFaces(input, new faceapi.SsdMobilenetv1Options())
const detections2 = await faceapi.detectAllFaces(input, new faceapi.TinyFaceDetectorOptions())
```
You can tune the options of each face detector as shown [here](#getting-started-face-detection-options).
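For instance, a short sketch (option names per the original face-api.js documentation):

```js
// SSD Mobilenet V1 options: minimum confidence threshold and maximum number of results
const ssdOptions = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5, maxResults: 10 });
// Tiny Face Detector options: input size (must be divisible by 32) and score threshold
const tinyOptions = new faceapi.TinyFaceDetectorOptions({ inputSize: 416, scoreThreshold: 0.5 });
const detections = await faceapi.detectAllFaces(input, ssdOptions);
```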
#### Detecting 68 Face Landmark Points
**After face detection, we can furthermore predict the facial landmarks for each detected face as follows:**
Detect all faces in an image + computes 68 Point Face Landmarks for each detected face. Returns **Array<[WithFaceLandmarks<WithFaceDetection<{}>>](#getting-started-utility-classes)>**:
```js
const detectionsWithLandmarks = await faceapi.detectAllFaces(input).withFaceLandmarks()
```
Detect the face with the highest confidence score in an image + computes 68 Point Face Landmarks for that face. Returns **[WithFaceLandmarks<WithFaceDetection<{}>>](#getting-started-utility-classes) | undefined**:
```js
const detectionWithLandmarks = await faceapi.detectSingleFace(input).withFaceLandmarks()
```
You can also specify to use the tiny model instead of the default model:
```js
const useTinyModel = true
const detectionsWithLandmarks = await faceapi.detectAllFaces(input).withFaceLandmarks(useTinyModel)
```
#### Computing Face Descriptors
**After face detection and facial landmark prediction the face descriptors for each face can be computed as follows:**
Detect all faces in an image + compute 68 Point Face Landmarks for each detected face. Returns **Array<[WithFaceDescriptor<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:
```js
const results = await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceDescriptors()
```
Detect the face with the highest confidence score in an image + compute 68 Point Face Landmarks and face descriptor for that face. Returns **[WithFaceDescriptor<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:
```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceDescriptor()
```
#### Recognizing Face Expressions
**Face expression recognition can be performed for detected faces as follows:**
Detect all faces in an image + recognize face expressions of each face. Returns **Array<[WithFaceExpressions<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:
```js
const detectionsWithExpressions = await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceExpressions()
```
Detect the face with the highest confidence score in an image + recognize the face expressions for that face. Returns **[WithFaceExpressions<WithFaceLandmarks<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:
```js
const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceExpressions()
```
**You can also skip .withFaceLandmarks(), which skips the face alignment step (at the cost of less stable accuracy):**
Detect all faces without face alignment + recognize face expressions of each face. Returns **Array<[WithFaceExpressions<WithFaceDetection<{}>>](#getting-started-utility-classes)>**:
```js
const detectionsWithExpressions = await faceapi.detectAllFaces(input).withFaceExpressions()
```
Detect the face with the highest confidence score without face alignment + recognize the face expression for that face. Returns **[WithFaceExpressions<WithFaceDetection<{}>>](#getting-started-utility-classes) | undefined**:
```js
const detectionWithExpressions = await faceapi.detectSingleFace(input).withFaceExpressions()
```
#### Age Estimation and Gender Recognition
**Age estimation and gender recognition from detected faces can be done as follows:**
Detect all faces in an image + estimate age and recognize gender of each face. Returns **Array<[WithAge<WithGender<WithFaceLandmarks<WithFaceDetection<{}>>>>](#getting-started-utility-classes)>**:
```js
const detectionsWithAgeAndGender = await faceapi.detectAllFaces(input).withFaceLandmarks().withAgeAndGender()
```
Detect the face with the highest confidence score in an image + estimate age and recognize gender for that face. Returns **[WithAge<WithGender<WithFaceLandmarks<WithFaceDetection<{}>>>>](#getting-started-utility-classes) | undefined**:
```js
const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withFaceLandmarks().withAgeAndGender()
```
**You can also skip .withFaceLandmarks(), which skips the face alignment step (at the cost of less stable accuracy):**
Detect all faces without face alignment + estimate age and recognize gender of each face. Returns **Array<[WithAge<WithGender<WithFaceDetection<{}>>>](#getting-started-utility-classes)>**:
```js
const detectionsWithAgeAndGender = await faceapi.detectAllFaces(input).withAgeAndGender()
```
Detect the face with the highest confidence score without face alignment + estimate age and recognize gender for that face. Returns **[WithAge<WithGender<WithFaceDetection<{}>>>](#getting-started-utility-classes) | undefined**:
```js
const detectionWithAgeAndGender = await faceapi.detectSingleFace(input).withAgeAndGender()
```
#### Composition of Tasks
**Tasks can be composed as follows:**
```js
// all faces
await faceapi.detectAllFaces(input)
await faceapi.detectAllFaces(input).withFaceExpressions()
await faceapi.detectAllFaces(input).withFaceLandmarks()
await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceExpressions()
await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceExpressions().withFaceDescriptors()
await faceapi.detectAllFaces(input).withFaceLandmarks().withAgeAndGender().withFaceDescriptors()
await faceapi.detectAllFaces(input).withFaceLandmarks().withFaceExpressions().withAgeAndGender().withFaceDescriptors()
// single face
await faceapi.detectSingleFace(input)
await faceapi.detectSingleFace(input).withFaceExpressions()
await faceapi.detectSingleFace(input).withFaceLandmarks()
await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceExpressions()
await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceExpressions().withFaceDescriptor()
await faceapi.detectSingleFace(input).withFaceLandmarks().withAgeAndGender().withFaceDescriptor()
await faceapi.detectSingleFace(input).withFaceLandmarks().withFaceExpressions().withAgeAndGender().withFaceDescriptor()
```
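Note that each step in a composed task requires its corresponding model to be loaded beforehand, otherwise the task fails at inference time. A minimal loading sketch, assuming the model files are served from a `/model` folder:
```js
await faceapi.nets.ssdMobilenetv1.loadFromUri('/model')      // face detection
await faceapi.nets.faceLandmark68Net.loadFromUri('/model')   // face landmarks
await faceapi.nets.faceRecognitionNet.loadFromUri('/model')  // face descriptors
await faceapi.nets.faceExpressionNet.loadFromUri('/model')   // face expressions
await faceapi.nets.ageGenderNet.loadFromUri('/model')        // age and gender
```
It is usually easiest to load everything you need up front, before composing any tasks.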
#### Face Recognition by Matching Descriptors
To perform face recognition, you can use `faceapi.FaceMatcher` to compare reference face descriptors to query face descriptors.
First, we initialize the FaceMatcher with the reference data. For example, we can simply detect faces in a **referenceImage** and match the descriptors of the detected faces against faces in subsequent images:
```js
const results = await faceapi
.detectAllFaces(referenceImage)
.withFaceLandmarks()
.withFaceDescriptors()
if (!results.length) {
return
}
// create FaceMatcher with automatically assigned labels
// from the detection results for the reference image
const faceMatcher = new faceapi.FaceMatcher(results)
```
Now we can recognize a person's face shown in **queryImage1**:
```js
const singleResult = await faceapi
.detectSingleFace(queryImage1)
.withFaceLandmarks()
.withFaceDescriptor()
if (singleResult) {
const bestMatch = faceMatcher.findBestMatch(singleResult.descriptor)
console.log(bestMatch.toString())
}
```
Or we can recognize all faces shown in **queryImage2**:
```js
const results = await faceapi
.detectAllFaces(queryImage2)
.withFaceLandmarks()
.withFaceDescriptors()
results.forEach(fd => {
const bestMatch = faceMatcher.findBestMatch(fd.descriptor)
console.log(bestMatch.toString())
})
```
You can also create labeled reference descriptors as follows:
```js
const labeledDescriptors = [
new faceapi.LabeledFaceDescriptors(
'obama',
[descriptorObama1, descriptorObama2]
),
new faceapi.LabeledFaceDescriptors(
'trump',
[descriptorTrump]
)
]
const faceMatcher = new faceapi.FaceMatcher(labeledDescriptors)
```
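The descriptors above (e.g. **descriptorObama1**) are placeholders. As a sketch, such labeled reference descriptors could be computed from one or more example images per person, using only the APIs shown earlier (the helper name and image URLs are hypothetical):
```js
// hypothetical helper: build a LabeledFaceDescriptors entry from example image urls
async function createLabeledDescriptors(label, imageUrls) {
  const descriptors = []
  for (const url of imageUrls) {
    const img = await faceapi.fetchImage(url)
    const result = await faceapi.detectSingleFace(img).withFaceLandmarks().withFaceDescriptor()
    if (result) descriptors.push(result.descriptor)
  }
  return new faceapi.LabeledFaceDescriptors(label, descriptors)
}

const labeledDescriptors = await Promise.all([
  createLabeledDescriptors('obama', ['/images/obama1.png', '/images/obama2.png']),
  createLabeledDescriptors('trump', ['/images/trump1.png'])
])
const faceMatcher = new faceapi.FaceMatcher(labeledDescriptors)
```
Using several example images per label tends to make matching more robust than a single reference descriptor.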
<a name="getting-started-displaying-detection-results"></a>
### Displaying Detection Results
Preparing the overlay canvas:
```js
const displaySize = { width: input.width, height: input.height }
// resize the overlay canvas to the input dimensions
const canvas = document.getElementById('overlay')
faceapi.matchDimensions(canvas, displaySize)
```
face-api.js predefines some high-level drawing functions, which you can utilize:
```js
/* Display detected face bounding boxes */
const detections = await faceapi.detectAllFaces(input)
// resize the detected boxes in case your displayed image has a different size than the original
const resizedDetections = faceapi.resizeResults(detections, displaySize)
// draw detections into the canvas
faceapi.draw.drawDetections(canvas, resizedDetections)
/* Display face landmarks */
const detectionsWithLandmarks = await faceapi
.detectAllFaces(input)
.withFaceLandmarks()
// resize the detected boxes and landmarks in case your displayed image has a different size than the original
const resizedResults = faceapi.resizeResults(detectionsWithLandmarks, displaySize)
// draw detections into the canvas
faceapi.draw.drawDetections(canvas, resizedResults)
// draw the landmarks into the canvas
faceapi.draw.drawFaceLandmarks(canvas, resizedResults)
/* Display face expression results */
const detectionsWithExpressions = await faceapi
.detectAllFaces(input)
.withFaceLandmarks()
.withFaceExpressions()
// resize the detected boxes and landmarks in case your displayed image has a different size than the original
const resizedResults = faceapi.resizeResults(detectionsWithExpressions, displaySize)
// draw detections into the canvas
faceapi.draw.drawDetections(canvas, resizedResults)
// draw a text box into the canvas, displaying the face expressions above a minimum probability
const minProbability = 0.05
faceapi.draw.drawFaceExpressions(canvas, resizedResults, minProbability)
```
You can also draw boxes with custom text ([DrawBox](https://github.com/justadudewhohacks/tfjs-image-recognition-base/blob/master/src/draw/DrawBox.ts)):
```js
const box = { x: 50, y: 50, width: 100, height: 100 }
// see DrawBoxOptions below
const drawOptions = {
label: 'Hello I am a box!',
lineWidth: 2
}
const drawBox = new faceapi.draw.DrawBox(box, drawOptions)
drawBox.draw(document.getElementById('myCanvas'))
```
DrawBox drawing options:
```js
export interface IDrawBoxOptions {
boxColor?: string
lineWidth?: number
drawLabelOptions?: IDrawTextFieldOptions
label?: string
}
```
Finally, you can draw custom text fields ([DrawTextField](https://github.com/justadudewhohacks/tfjs-image-recognition-base/blob/master/src/draw/DrawTextField.ts)):
```js
const text = [
'This is a textline!',
'This is another textline!'
]
const anchor = { x: 200, y: 200 }
// see DrawTextField below
const drawOptions = {
anchorPosition: 'TOP_LEFT',
backgroundColor: 'rgba(0, 0, 0, 0.5)'
}
const drawBox = new faceapi.draw.DrawTextField(text, anchor, drawOptions)
drawBox.draw(document.getElementById('myCanvas'))
```
DrawTextField drawing options:
```js
export interface IDrawTextFieldOptions {
anchorPosition?: AnchorPosition
backgroundColor?: string
fontColor?: string
fontSize?: number
fontStyle?: string
padding?: number
}
export enum AnchorPosition {
TOP_LEFT = 'TOP_LEFT',
TOP_RIGHT = 'TOP_RIGHT',
BOTTOM_LEFT = 'BOTTOM_LEFT',
BOTTOM_RIGHT = 'BOTTOM_RIGHT'
}
```
<a name="getting-started-face-detection-options"></a>
### Face Detection Options
#### SsdMobilenetv1Options
```js
export interface ISsdMobilenetv1Options {
// minimum confidence threshold
// default: 0.5
minConfidence?: number
// maximum number of faces to return
// default: 100
maxResults?: number
}
// example
const options = new faceapi.SsdMobilenetv1Options({ minConfidence: 0.8 })
```
#### TinyFaceDetectorOptions
```js
export interface ITinyFaceDetectorOptions {
// size at which the image is processed; the smaller the faster,
// but less precise at detecting smaller faces. Must be divisible
// by 32; common sizes are 128, 160, 224, 320, 416, 512, 608.
// For face tracking via webcam smaller sizes (e.g. 128, 160) are recommended,
// for detecting smaller faces use larger sizes (e.g. 512, 608)
// default: 416
inputSize?: number
// minimum confidence threshold
// default: 0.5
scoreThreshold?: number
}
// example
const options = new faceapi.TinyFaceDetectorOptions({ inputSize: 320 })
```
<a name="getting-started-utility-classes"></a>
### Utility Classes
#### IBox
```js
export interface IBox {
x: number
y: number
width: number
height: number
}
```
#### IFaceDetection
```js
export interface IFaceDetection {
score: number
box: Box
}
```
#### IFaceLandmarks
```js
export interface IFaceLandmarks {
positions: Point[]
shift: Point
}
```
#### WithFaceDetection
```js
export type WithFaceDetection<TSource> = TSource & {
detection: FaceDetection
}
```
#### WithFaceLandmarks
```js
export type WithFaceLandmarks<TSource> = TSource & {
unshiftedLandmarks: FaceLandmarks
landmarks: FaceLandmarks
alignedRect: FaceDetection
angle: { roll: number, yaw: number, pitch: number }
// for angle all values are in radians in range of -pi/2 to pi/2 which is -90 to +90 degrees
// value of 0 means center
}
```
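Since the angle values are given in radians, converting them to degrees is a one-liner. A minimal sketch, assuming a successful detection result from `.withFaceLandmarks()`:
```js
const result = await faceapi.detectSingleFace(input).withFaceLandmarks()
if (result) {
  const toDegrees = (radians) => radians * 180 / Math.PI
  console.log('roll:', toDegrees(result.angle.roll), 'yaw:', toDegrees(result.angle.yaw), 'pitch:', toDegrees(result.angle.pitch))
}
```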
#### WithFaceDescriptor
```js
export type WithFaceDescriptor<TSource> = TSource & {
descriptor: Float32Array
}
```
#### WithFaceExpressions
```js
export type WithFaceExpressions<TSource> = TSource & {
expressions: FaceExpressions
}
```
#### WithAge
```js
export type WithAge<TSource> = TSource & {
age: number
}
```
#### WithGender
```js
export type WithGender<TSource> = TSource & {
gender: Gender
genderProbability: number
}
export enum Gender {
FEMALE = 'female',
MALE = 'male'
}
```
<a name="getting-started-other-useful-utility"></a>
### Other Useful Utilities
#### Using the Low Level API
Instead of using the high-level API, you can directly use the forward methods of each neural network:
```js
const detections1 = await faceapi.ssdMobilenetv1(input, options)
const detections2 = await faceapi.tinyFaceDetector(input, options)
const landmarks1 = await faceapi.detectFaceLandmarks(faceImage)
const landmarks2 = await faceapi.detectFaceLandmarksTiny(faceImage)
const descriptor = await faceapi.computeFaceDescriptor(alignedFaceImage)
```
#### Extracting a Canvas for an Image Region
```js
const regionsToExtract = [
new faceapi.Rect(0, 0, 100, 100)
]
// actually extractFaces is meant to extract face regions from bounding boxes
// but you can also use it to extract any other region
const canvases = await faceapi.extractFaces(input, regionsToExtract)
```
#### Euclidean Distance
```js
// meant to be used for computing the euclidean distance between two face descriptors
const dist = faceapi.euclideanDistance([0, 0], [0, 10])
console.log(dist) // 10
```
#### Retrieve the Face Landmark Points and Contours
```js
const landmarkPositions = landmarks.positions
// or get the positions of individual contours,
// only available for 68 point face landmarks (FaceLandmarks68)
const jawOutline = landmarks.getJawOutline()
const nose = landmarks.getNose()
const mouth = landmarks.getMouth()
const leftEye = landmarks.getLeftEye()
const rightEye = landmarks.getRightEye()
const leftEyeBrow = landmarks.getLeftEyeBrow()
const rightEyeBrow = landmarks.getRightEyeBrow()
```
#### Fetch and Display Images from a URL
```html
<img id="myImg" src="">
```
```js
const image = await faceapi.fetchImage('/images/example.png')
console.log(image instanceof HTMLImageElement) // true
// displaying the fetched image content
const myImg = document.getElementById('myImg')
myImg.src = image.src
```
#### Fetching JSON
```js
const json = await faceapi.fetchJson('/files/example.json')
```
#### Creating an Image Picker
```html
<img id="myImg" src="">
<input id="myFileUpload" type="file" onchange="uploadImage()" accept=".jpg, .jpeg, .png">
```
```js
async function uploadImage() {
const imgFile = document.getElementById('myFileUpload').files[0]
// create an HTMLImageElement from a Blob
const img = await faceapi.bufferToImage(imgFile)
document.getElementById('myImg').src = img.src
}
```
#### Creating a Canvas Element from an Image or Video Element
```html
<img id="myImg" src="images/example.png" />
<video id="myVideo" src="media/example.mp4" />
```
```js
const canvas1 = faceapi.createCanvasFromMedia(document.getElementById('myImg'))
const canvas2 = faceapi.createCanvasFromMedia(document.getElementById('myVideo'))
```
<a name="models"></a>
<br><hr><br>
## Available Models
<a name="models-face-detection"></a>
### Face Detection Models
#### SSD Mobilenet V1
For face detection, this project implements an SSD (Single Shot Multibox Detector) based on MobileNetV1. The neural net computes the location of each face in an image and returns the bounding boxes together with its probability for each face. This face detector aims at high accuracy in detecting face bounding boxes rather than at low inference time. The size of the quantized model is about 5.4 MB (**ssd_mobilenetv1_model**).
The face detection model has been trained on the [WIDERFACE dataset](http://mmlab.ie.cuhk.edu.hk/projects/WIDERFace/) and the weights are provided by [yeephycho](https://github.com/yeephycho) in [this](https://github.com/yeephycho/tensorflow-face-detection) repo.
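A minimal usage sketch, assuming the model files are served from a `/model` folder:
```js
await faceapi.nets.ssdMobilenetv1.loadFromUri('/model')
const detections = await faceapi.detectAllFaces(input, new faceapi.SsdMobilenetv1Options({ minConfidence: 0.5 }))
```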
#### Tiny Face Detector
The Tiny Face Detector is a very performant, realtime face detector that is much faster, smaller and less resource-consuming than the SSD Mobilenet V1 face detector; in return, it performs slightly worse at detecting small faces. This model is extremely mobile and web friendly, so it should be your go-to face detector on mobile devices and resource-limited clients. The size of the quantized model is only 190 KB (**tiny_face_detector_model**).
The face detector has been trained on a custom dataset of ~14K images labeled with bounding boxes. Furthermore, the model has been trained to predict bounding boxes that entirely cover facial feature points, so in general it produces better results in combination with subsequent face landmark detection than SSD Mobilenet V1.
This model is basically an even tinier version of Tiny Yolo V2, replacing Yolo's regular convolutions with depthwise separable convolutions. Since Yolo is fully convolutional, it can easily adapt to different input image sizes to trade off accuracy for performance (inference time).
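A minimal sketch for realtime webcam face tracking with this detector (assuming model files in `/model` and a hypothetical `videoEl` video element):
```js
await faceapi.nets.tinyFaceDetector.loadFromUri('/model')
// a small inputSize keeps inference fast enough for realtime tracking
const options = new faceapi.TinyFaceDetectorOptions({ inputSize: 160, scoreThreshold: 0.5 })
const detections = await faceapi.detectAllFaces(videoEl, options)
```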
<a name="models-face-landmark-detection"></a>
### 68 Point Face Landmark Detection Models
This package implements a very lightweight, fast, yet accurate 68 point face landmark detector. The default model has a size of only 350 KB (**face_landmark_68_model**) and the tiny model is only 80 KB (**face_landmark_68_tiny_model**). Both models employ the ideas of depthwise separable convolutions as well as densely connected blocks. The models have been trained on a dataset of ~35k face images labeled with 68 face landmark points.
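Both variants are exposed through the same high-level API. A minimal sketch, assuming model files in `/model`:
```js
// default 68 point landmark model
await faceapi.nets.faceLandmark68Net.loadFromUri('/model')
// or the tiny variant, selected by passing useTinyModel = true to withFaceLandmarks()
await faceapi.nets.faceLandmark68TinyNet.loadFromUri('/model')
const results = await faceapi.detectAllFaces(input).withFaceLandmarks(true)
```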
<a name="models-face-recognition"></a>
### Face Recognition Model
For face recognition, a ResNet-34-like architecture is implemented to compute a face descriptor (a feature vector with 128 values) from any given face image, which is used to describe the characteristics of a person's face. The model is **not** limited to the set of faces used for training, meaning you can use it for face recognition of any person, for example yourself. You can determine the similarity of two arbitrary faces by comparing their face descriptors, for example by computing the Euclidean distance or using any other classifier of your choice.
The neural net is equivalent to the **FaceRecognizerNet** used in [face-recognition.js](https://github.com/justadudewhohacks/face-recognition.js) and the net used in the [dlib](https://github.com/davisking/dlib/blob/master/examples/dnn_face_recognition_ex.cpp) face recognition example. The weights have been trained by [davisking](https://github.com/davisking) and the model achieves a prediction accuracy of 99.38% on the LFW (Labeled Faces in the Wild) benchmark for face recognition.
The size of the quantized model is roughly 6.2 MB (**face_recognition_model**).
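As a sketch, two descriptors can be compared directly via the Euclidean distance; `descriptor1`/`descriptor2` are hypothetical, and 0.6 is a commonly used starting threshold (an assumption; tune it for your data):
```js
// descriptors obtained via .withFaceDescriptor() as shown above
const distance = faceapi.euclideanDistance(descriptor1, descriptor2)
const isSameIdentity = distance < 0.6 // assumed threshold; lower distance = more similar
```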
<a name="models-face-expression-recognition"></a>
### Face Expression Recognition Model
The face expression recognition model is lightweight, fast and provides reasonable accuracy. The model has a size of roughly 310 KB and employs depthwise separable convolutions and densely connected blocks. It has been trained on a variety of images from publicly available datasets as well as images scraped from the web. Note that wearing glasses might decrease the accuracy of the prediction results.
<a name="models-age-and-gender-recognition"></a>
### Age and Gender Recognition Model
The age and gender recognition model is a multitask network which employs a feature extraction layer, an age regression layer and a gender classifier. The model has a size of roughly 420 KB and the feature extractor employs a tinier but very similar architecture to Xception.
This model has been trained and tested on the following databases with an 80/20 train/test split each: UTK, FGNET, Chalearn, Wiki, IMDB*, CACD*, MegaAge, MegaAge-Asian. The `*` indicates that these databases have been algorithmically cleaned up, since the initial databases are very noisy.
#### Total Test Results
Total MAE (Mean Age Error): **4.54**
Total Gender Accuracy: **95%**
#### Test results for each database
The `-` indicates that no gender labels are available for these databases.
Database | UTK | FGNET | Chalearn | Wiki | IMDB* | CACD* | MegaAge | MegaAge-Asian |
----------------|-------:|------:|---------:|-----:|------:|------:|--------:|--------------:|
MAE | 5.25 | 4.23 | 6.24 | 6.54 | 3.63 | 3.20 | 6.23 | 4.21 |
Gender Accuracy | 0.93 | - | 0.94 | 0.95 | - | 0.97 | - | - |
#### Test results for different age category groups
Age Range | 0 - 3 | 4 - 8 | 9 - 18 | 19 - 28 | 29 - 40 | 41 - 60 | 60 - 80 | 80+ |
----------------|-------:|------:|-------:|--------:|--------:|--------:|--------:|--------:|
MAE | 1.52 | 3.06 | 4.82 | 4.99 | 5.43 | 4.94 | 6.17 | 9.91 |
Gender Accuracy | 0.69 | 0.80 | 0.88 | 0.96 | 0.97 | 0.97 | 0.96 | 0.9 |

@@ -1,38 +0,0 @@
{
"$schema": "https://developer.microsoft.com/json-schemas/api-extractor/v7/api-extractor.schema.json",
"mainEntryPointFilePath": "types/lib/src/index.d.ts",
"bundledPackages": ["@tensorflow/tfjs-core", "@tensorflow/tfjs-converter", "@types/offscreencanvas"],
"compiler": {
"skipLibCheck": false
},
"newlineKind": "lf",
"dtsRollup": {
"enabled": true,
"untrimmedFilePath": "types/face-api.d.ts"
},
"docModel": { "enabled": false },
"tsdocMetadata": {
"enabled": false
},
"apiReport": { "enabled": false },
"messages": {
"compilerMessageReporting": {
"default": {
"logLevel": "warning"
}
},
"extractorMessageReporting": {
"default": {
"logLevel": "warning"
},
"ae-missing-release-tag": {
"logLevel": "none"
}
},
"tsdocMessageReporting": {
"default": {
"logLevel": "warning"
}
}
}
}

@@ -1,77 +0,0 @@
const fs = require('fs');
const log = require('@vladmandic/pilogger');
const Build = require('@vladmandic/build').Build;
const APIExtractor = require('@microsoft/api-extractor');
const regEx = [
{ search: 'types="@webgpu/types/dist"', replace: 'path="../src/types/webgpu.d.ts"' },
{ search: 'types="offscreencanvas"', replace: 'path="../src/types/offscreencanvas.d.ts"' },
];
function copyFile(src, dst) {
if (!fs.existsSync(src)) {
log.warn('Copy:', { input: src, output: dst });
return;
}
log.state('Copy:', { input: src, output: dst });
const buffer = fs.readFileSync(src);
fs.writeFileSync(dst, buffer);
}
function writeFile(str, dst) {
log.state('Write:', { output: dst });
fs.writeFileSync(dst, str);
}
function regExFile(src, entries) {
if (!fs.existsSync(src)) {
log.warn('Filter:', { src });
return;
}
log.state('Filter:', { input: src });
for (const entry of entries) {
const buffer = fs.readFileSync(src, 'UTF-8');
const lines = buffer.split(/\r?\n/);
const out = [];
for (const line of lines) {
if (line.includes(entry.search)) out.push(line.replace(entry.search, entry.replace));
else out.push(line);
}
fs.writeFileSync(src, out.join('\n'));
}
}
const apiIgnoreList = ['ae-forgotten-export', 'ae-unresolved-link', 'tsdoc-param-tag-missing-hyphen'];
async function main() {
// run production build
const build = new Build();
await build.run('production');
// patch tfjs typedefs
log.state('Copy:', { input: 'types/lib/dist/tfjs.esm.d.ts' });
copyFile('types/lib/dist/tfjs.esm.d.ts', 'dist/tfjs.esm.d.ts');
// run api-extractor to create typedef rollup
const extractorConfig = APIExtractor.ExtractorConfig.loadFileAndPrepare('api-extractor.json');
const extractorResult = APIExtractor.Extractor.invoke(extractorConfig, {
localBuild: true,
showVerboseMessages: false,
messageCallback: (msg) => {
msg.handled = true;
if (msg.logLevel === 'none' || msg.logLevel === 'verbose' || msg.logLevel === 'info') return;
if (msg.sourceFilePath?.includes('/node_modules/')) return;
if (apiIgnoreList.reduce((prev, curr) => prev || msg.messageId.includes(curr), false)) return;
log.data('API', { level: msg.logLevel, category: msg.category, id: msg.messageId, file: msg.sourceFilePath, line: msg.sourceFileLine, text: msg.text });
},
});
log.state('API-Extractor:', { succeeded: extractorResult.succeeded, errors: extractorResult.errorCount, warnings: extractorResult.warningCount });
regExFile('types/face-api.d.ts', regEx);
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm-nobundle.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.esm.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-gpu.d.ts');
writeFile('export * from \'../types/face-api\';', 'dist/face-api.node-wasm.d.ts');
log.info('FaceAPI Build complete...');
}
main();

build/package.json
@@ -0,0 +1,50 @@
{
"name": "@vladmandic/face-api",
"version": "0.5.3",
"description": "JavaScript module for Face Detection and Face Recognition Using Tensorflow/JS",
"main": "build/src/index.js",
"browser": "dist/face-api.js",
"typings": "build/src/index.d.ts",
"engines": {
"node": ">=12.0.0"
},
"type": "module",
"scripts": {
"build-esm": "esbuild --bundle --format=esm --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.esm.js --external:@tensorflow/tfjs --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build-iife": "esbuild --bundle --format=iife --target=esnext --platform=browser --sourcemap --outfile=./dist/face-api.js --global-name=faceapi --minify --log-level=error --tsconfig=./tsconfig.json build/src/index.js",
"build": "rimraf build/ dist/ && tsc && npm run build-esm && npm run build-iife"
},
"keywords": [
"tensorflow",
"tf",
"tfjs",
"face",
"face-api",
"face-detection",
"age-gender"
],
"repository": {
"type": "git",
"url": "git+https://github.com/vladmandic/face-api.git"
},
"publishConfig": {
"registry": "https://registry.npmjs.org/"
},
"author": "Vladimir Mandic <mandic00@live.com>",
"license": "MIT",
"bugs": {
"url": "https://github.com/vladmandic/face-api/issues"
},
"homepage": "https://github.com/vladmandic/face-api#readme",
"dependencies": {
"@tensorflow/tfjs": "^2.6.0"
},
"devDependencies": {
"@types/node": "^14.11.8",
"esbuild": "^0.6.34",
"rimraf": "^3.0.2",
"ts-node": "^9.0.0",
"tslib": "^2.0.3",
"typescript": "^4.1.0-dev.20201011"
}
}

@@ -1,10 +1,10 @@
import * as tf from '../dist/tfjs.esm';
import { ParamMapping } from './common/index';
import * as tf from '@tensorflow/tfjs';
import { ParamMapping } from './common';
export declare abstract class NeuralNetwork<TNetParams> {
constructor(name: string);
protected _name: string;
protected _params: TNetParams | undefined;
protected _paramMappings: ParamMapping[];
_name: any;
constructor(_name: string);
get params(): TNetParams | undefined;
get paramMappings(): ParamMapping[];
get isLoaded(): boolean;
@@ -33,7 +33,7 @@ export declare abstract class NeuralNetwork<TNetParams> {
extractWeights(weights: Float32Array): void;
private traversePropertyPath;
protected abstract getDefaultModelName(): string;
protected abstract extractParamsFromWeightMap(weightMap: tf.NamedTensorMap): {
protected abstract extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): {
params: TNetParams;
paramMappings: ParamMapping[];
};
@@ -42,3 +42,4 @@ export declare abstract class NeuralNetwork<TNetParams> {
paramMappings: ParamMapping[];
};
}
//# sourceMappingURL=NeuralNetwork.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"NeuralNetwork.d.ts","sourceRoot":"","sources":["../../src/NeuralNetwork.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEvC,OAAO,EAAE,YAAY,EAAE,MAAM,UAAU,CAAC;AAKxC,8BAAsB,aAAa,CAAC,UAAU;IAKhC,SAAS,CAAC,KAAK,EAAE,MAAM;IAHnC,SAAS,CAAC,OAAO,EAAE,UAAU,GAAG,SAAS,CAAY;IACrD,SAAS,CAAC,cAAc,EAAE,YAAY,EAAE,CAAK;gBAEvB,KAAK,EAAE,MAAM;IAWnC,IAAW,MAAM,IAAI,UAAU,GAAG,SAAS,CAAwB;IACnE,IAAW,aAAa,IAAI,YAAY,EAAE,CAA+B;IACzE,IAAW,QAAQ,IAAI,OAAO,CAAyB;IAEhD,gBAAgB,CAAC,SAAS,EAAE,MAAM,GAAG,EAAE,CAAC,MAAM;IAK9C,qBAAqB,CAAC,SAAS,EAAE,MAAM,EAAE,MAAM,EAAE,EAAE,CAAC,MAAM;IAM1D,YAAY;;;;IAOZ,kBAAkB;;;;IAIlB,eAAe;;;;IAIf,QAAQ;IAMR,MAAM;IAQN,OAAO,CAAC,gBAAgB,GAAE,OAAc;IAUxC,eAAe,IAAI,YAAY;IAQzB,IAAI,CAAC,YAAY,EAAE,YAAY,GAAG,MAAM,GAAG,SAAS,GAAG,OAAO,CAAC,IAAI,CAAC;IASpE,WAAW,CAAC,GAAG,EAAE,MAAM,GAAG,SAAS;IASnC,YAAY,CAAC,QAAQ,EAAE,MAAM,GAAG,SAAS;IAmB/C,iBAAiB,CAAC,SAAS,EAAE,EAAE,CAAC,cAAc;IAU9C,cAAc,CAAC,OAAO,EAAE,YAAY;IAU3C,OAAO,CAAC,oBAAoB;IAqB5B,SAAS,CAAC,QAAQ,CAAC,mBAAmB,IAAI,MAAM;IAChD,SAAS,CAAC,QAAQ,CAAC,0BAA0B,CAAC,SAAS,EAAE,EAAE,CAAC,cAAc,GAAG;QAAE,MAAM,EAAE,UAAU,CAAC;QAAC,aAAa,EAAE,YAAY,EAAE,CAAA;KAAE;IAClI,SAAS,CAAC,QAAQ,CAAC,aAAa,CAAC,OAAO,EAAE,YAAY,GAAG;QAAE,MAAM,EAAE,UAAU,CAAC;QAAC,aAAa,EAAE,YAAY,EAAE,CAAA;KAAE;CAC/G"}

build/src/NeuralNetwork.js
@@ -0,0 +1,122 @@
import * as tf from '@tensorflow/tfjs';
import { getModelUris } from './common/getModelUris';
import { loadWeightMap } from './dom';
import { env } from './env';
export class NeuralNetwork {
constructor(_name) {
this._name = _name;
this._params = undefined;
this._paramMappings = [];
/*
try {
const testTensor = tf.tensor([0]);
testTensor.toFloat();
} catch (err) {
throw new Error(`tfjs module not loaded: load '@tensorflow/tfjs' or '@tensorflow/tfjs-core' with appropriate backend explicitly`);
}
*/
}
get params() { return this._params; }
get paramMappings() { return this._paramMappings; }
get isLoaded() { return !!this.params; }
getParamFromPath(paramPath) {
const { obj, objProp } = this.traversePropertyPath(paramPath);
return obj[objProp];
}
reassignParamFromPath(paramPath, tensor) {
const { obj, objProp } = this.traversePropertyPath(paramPath);
obj[objProp].dispose();
obj[objProp] = tensor;
}
getParamList() {
return this._paramMappings.map(({ paramPath }) => ({
path: paramPath,
tensor: this.getParamFromPath(paramPath)
}));
}
getTrainableParams() {
return this.getParamList().filter(param => param.tensor instanceof tf.Variable);
}
getFrozenParams() {
return this.getParamList().filter(param => !(param.tensor instanceof tf.Variable));
}
variable() {
this.getFrozenParams().forEach(({ path, tensor }) => {
this.reassignParamFromPath(path, tensor.variable());
});
}
freeze() {
this.getTrainableParams().forEach(({ path, tensor: variable }) => {
const tensor = tf.tensor(variable.dataSync());
variable.dispose();
this.reassignParamFromPath(path, tensor);
});
}
dispose(throwOnRedispose = true) {
this.getParamList().forEach(param => {
if (throwOnRedispose && param.tensor.isDisposed) {
throw new Error(`param tensor has already been disposed for path ${param.path}`);
}
param.tensor.dispose();
});
this._params = undefined;
}
serializeParams() {
return new Float32Array(this.getParamList()
.map(({ tensor }) => Array.from(tensor.dataSync()))
.reduce((flat, arr) => flat.concat(arr)));
}
async load(weightsOrUrl) {
if (weightsOrUrl instanceof Float32Array) {
this.extractWeights(weightsOrUrl);
return;
}
await this.loadFromUri(weightsOrUrl);
}
async loadFromUri(uri) {
if (uri && typeof uri !== 'string') {
throw new Error(`${this._name}.loadFromUri - expected model uri`);
}
const weightMap = await loadWeightMap(uri, this.getDefaultModelName());
this.loadFromWeightMap(weightMap);
}
async loadFromDisk(filePath) {
if (filePath && typeof filePath !== 'string') {
throw new Error(`${this._name}.loadFromDisk - expected model file path`);
}
const { readFile } = env.getEnv();
const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
const fetchWeightsFromDisk = (filePaths) => Promise.all(filePaths.map(filePath => readFile(filePath).then(buf => buf.buffer)));
const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
const manifest = JSON.parse((await readFile(manifestUri)).toString());
const weightMap = await loadWeights(manifest, modelBaseUri);
this.loadFromWeightMap(weightMap);
}
loadFromWeightMap(weightMap) {
const { paramMappings, params } = this.extractParamsFromWeigthMap(weightMap);
this._paramMappings = paramMappings;
this._params = params;
}
extractWeights(weights) {
const { paramMappings, params } = this.extractParams(weights);
this._paramMappings = paramMappings;
this._params = params;
}
traversePropertyPath(paramPath) {
if (!this.params) {
throw new Error(`traversePropertyPath - model has no loaded params`);
}
const result = paramPath.split('/').reduce((res, objProp) => {
if (!res.nextObj.hasOwnProperty(objProp)) {
throw new Error(`traversePropertyPath - object does not have property ${objProp}, for path ${paramPath}`);
}
return { obj: res.nextObj, objProp, nextObj: res.nextObj[objProp] };
}, { nextObj: this.params });
const { obj, objProp } = result;
if (!obj || !objProp || !(obj[objProp] instanceof tf.Tensor)) {
throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`);
}
return { obj, objProp };
}
}
//# sourceMappingURL=NeuralNetwork.js.map

@@ -0,0 +1 @@
{"version":3,"file":"NeuralNetwork.js","sourceRoot":"","sources":["../../src/NeuralNetwork.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAGvC,OAAO,EAAE,YAAY,EAAE,MAAM,uBAAuB,CAAC;AACrD,OAAO,EAAE,aAAa,EAAE,MAAM,OAAO,CAAC;AACtC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAE5B,MAAM,OAAgB,aAAa;IAKjC,YAAsB,KAAa;QAAb,UAAK,GAAL,KAAK,CAAQ;QAHzB,YAAO,GAA2B,SAAS,CAAA;QAC3C,mBAAc,GAAmB,EAAE,CAAA;QAG3C;;;;;;;UAOE;IACJ,CAAC;IAED,IAAW,MAAM,KAA6B,OAAO,IAAI,CAAC,OAAO,CAAA,CAAC,CAAC;IACnE,IAAW,aAAa,KAAqB,OAAO,IAAI,CAAC,cAAc,CAAA,CAAC,CAAC;IACzE,IAAW,QAAQ,KAAc,OAAO,CAAC,CAAC,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;IAEhD,gBAAgB,CAAC,SAAiB;QACvC,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,CAAC,CAAA;QAC7D,OAAO,GAAG,CAAC,OAAO,CAAC,CAAA;IACrB,CAAC;IAEM,qBAAqB,CAAC,SAAiB,EAAE,MAAiB;QAC/D,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,oBAAoB,CAAC,SAAS,CAAC,CAAA;QAC7D,GAAG,CAAC,OAAO,CAAC,CAAC,OAAO,EAAE,CAAA;QACtB,GAAG,CAAC,OAAO,CAAC,GAAG,MAAM,CAAA;IACvB,CAAC;IAEM,YAAY;QACjB,OAAO,IAAI,CAAC,cAAc,CAAC,GAAG,CAAC,CAAC,EAAE,SAAS,EAAE,EAAE,EAAE,CAAC,CAAC;YACjD,IAAI,EAAE,SAAS;YACf,MAAM,EAAE,IAAI,CAAC,gBAAgB,CAAC,SAAS,CAAC;SACzC,CAAC,CAAC,CAAA;IACL,CAAC;IAEM,kBAAkB;QACvB,OAAO,IAAI,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,KAAK,CAAC,MAAM,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAA;IACjF,CAAC;IAEM,eAAe;QACpB,OAAO,IAAI,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,YAAY,EAAE,CAAC,QAAQ,CAAC,CAAC,CAAA;IACpF,CAAC;IAEM,QAAQ;QACb,IAAI,CAAC,eAAe,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,EAAE,EAAE;YAClD,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,MAAM,CAAC,QAAQ,EAAE,CAAC,CAAA;QACrD,CAAC,CAAC,CAAA;IACJ,CAAC;IAEM,MAAM;QACX,IAAI,CAAC,kBAAkB,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,EAAE,EAAE;YAC/D,MAAM,MAAM,GAAG,EAAE,CAAC,MAAM,CAAC,QAAQ,CAAC,QAAQ,EAAE,CAAC,CAAA;YAC7C,QAAQ,CAAC,OAAO,EAAE,CAAA;YAClB,IAAI,CAAC,qBAAqB,CAAC,IAAI,EAAE,MAAM,CAAC,CAAA;QAC1C,CAAC,CAAC,CAAA;IACJ,CAAC;IAEM,OAAO,CAAC,mBAA4B,IAAI;QAC7C,IAAI,CAAC,YAAY,EAAE,CAAC,OAAO,CAAC,KAAK,CAAC,EAAE;YAClC,IAAI,gBAAgB,IAAI,KAAK,CAAC,MAAM,CAAC,UAAU,EAAE;gBAC/C,MAAM,IAAI,KAAK,CAAC,mDAAmD,KAAK,CAAC,IAAI,EAAE,CAAC,CAAA;aACjF;YACD,KAAK,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QACxB,CAAC,CAAC,CAAA;QACF,IAAI,CAAC,OAAO,GAAG,SAAS,CAAA;IAC1B,CAAC;IAEM,eAAe;QACpB,OAAO,IAAI,YAAY,CACrB,IAAI,CAAC,YAAY,EAAE;aAChB,GAAG,CAAC,CAAC,EAAE,MAAM,EAAE,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,QAAQ,EAAE,CAAa,CAAC;aAC9D,MAAM,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAAC,IAAI,CAAC,MAAM,CAAC,GAAG,CAAC,CAAC,CAC3C,CAAA;IACH,CAAC;IAEM,KAAK,CAAC,IAAI,CAAC,YAA+C;QAC/D,IAAI,YAAY,YAAY,YAAY,EAAE;YACxC,IAAI,CAAC,cAAc,CAAC,YAAY,CAAC,CAAA;YACjC,OAAM;SACP;QAED,MAAM,IAAI,CAAC,WAAW,CAAC,YAAY,CAAC,CAAA;IACtC,CAAC;IAEM,KAAK,CAAC,WAAW,CAAC,GAAuB;QAC9C,IAAI,GAAG,IAAI,OAAO,GAAG,KAAK,QAAQ,EAAE;YAClC,MAAM,IAAI,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,mCAAmC,CAAC,CAAA;SAClE;QAED,MAAM,SAAS,GAAG,MAAM,aAAa,CAAC,GAAG,EAAE,IAAI,CAAC,mBAAmB,EAAE,CAAC,CAAA;QACtE,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAA;IACnC,CAAC;IAEM,KAAK,CAAC,YAAY,CAAC,QAA4B;QACpD,IAAI,QAAQ,IAAI,OAAO,QAAQ,KAAK,QAAQ,EAAE;YAC5C,MAAM,IAAI,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,0CAA0C,CAAC,CAAA;SACzE;QAED,MAAM,EAAE,QAAQ,EAAE,GAAG,GAAG,CAAC,MAAM,EAAE,CAAA;QAEjC,MAAM,EAAE,WAAW,EAAE,YAAY,EAAE,GAAG,YAAY,CAAC,QAAQ,EAAE,IAAI,CAAC,mBAAmB,EAAE,CAAC,CAAA;QAExF,MAAM,oBAAoB,GAAG,CAAC,SAAmB,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAC/D,SAAS,CAAC,GAAG,CAAC,QAAQ,CAAC,EAAE,CAAC,QAAQ,CAAC,QAAQ,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC,CACtE,CAAA;QACD,MAAM,WAAW,GAAG,EAAE,CAAC,EAAE,CAAC,oBAAoB,CAAC,oBAAoB,CAAC,CAAA;QACpE,MAAM,QAAQ,GAAG,IAAI,CAAC,KAAK,CAAC,CAAC,MA
AM,QAAQ,CAAC,WAAW,CAAC,CAAC,CAAC,QAAQ,EAAE,CAAC,CAAA;QACrE,MAAM,SAAS,GAAG,MAAM,WAAW,CAAC,QAAQ,EAAE,YAAY,CAAC,CAAA;QAE3D,IAAI,CAAC,iBAAiB,CAAC,SAAS,CAAC,CAAA;IACnC,CAAC;IAEM,iBAAiB,CAAC,SAA4B;QACnD,MAAM,EACJ,aAAa,EACb,MAAM,EACP,GAAG,IAAI,CAAC,0BAA0B,CAAC,SAAS,CAAC,CAAA;QAE9C,IAAI,CAAC,cAAc,GAAG,aAAa,CAAA;QACnC,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;IACvB,CAAC;IAEM,cAAc,CAAC,OAAqB;QACzC,MAAM,EACJ,aAAa,EACb,MAAM,EACP,GAAG,IAAI,CAAC,aAAa,CAAC,OAAO,CAAC,CAAA;QAE/B,IAAI,CAAC,cAAc,GAAG,aAAa,CAAA;QACnC,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;IACvB,CAAC;IAEO,oBAAoB,CAAC,SAAiB;QAC5C,IAAI,CAAC,IAAI,CAAC,MAAM,EAAE;YAChB,MAAM,IAAI,KAAK,CAAC,mDAAmD,CAAC,CAAA;SACrE;QAED,MAAM,MAAM,GAAG,SAAS,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,MAAM,CAAC,CAAC,GAAkD,EAAE,OAAO,EAAE,EAAE;YACzG,IAAI,CAAC,GAAG,CAAC,OAAO,CAAC,cAAc,CAAC,OAAO,CAAC,EAAE;gBACxC,MAAM,IAAI,KAAK,CAAC,wDAAwD,OAAO,cAAc,SAAS,EAAE,CAAC,CAAA;aAC1G;YAED,OAAO,EAAE,GAAG,EAAE,GAAG,CAAC,OAAO,EAAE,OAAO,EAAE,OAAO,EAAE,GAAG,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAA;QACrE,CAAC,EAAE,EAAE,OAAO,EAAE,IAAI,CAAC,MAAM,EAAE,CAAC,CAAA;QAE5B,MAAM,EAAE,GAAG,EAAE,OAAO,EAAE,GAAG,MAAM,CAAA;QAC/B,IAAI,CAAC,GAAG,IAAI,CAAC,OAAO,IAAI,CAAC,CAAC,GAAG,CAAC,OAAO,CAAC,YAAY,EAAE,CAAC,MAAM,CAAC,EAAE;YAC5D,MAAM,IAAI,KAAK,CAAC,8DAA8D,SAAS,EAAE,CAAC,CAAA;SAC3F;QAED,OAAO,EAAE,GAAG,EAAE,OAAO,EAAE,CAAA;IACzB,CAAC;CAKF"}

build/src/Platform.d.ts
@@ -0,0 +1,8 @@
export declare class PlatformBrowser {
private textEncoder;
fetch(path: string, init?: RequestInit): Promise<Response>;
now(): number;
encode(text: string, encoding: string): Uint8Array;
decode(bytes: Uint8Array, encoding: string): string;
}
//# sourceMappingURL=Platform.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"Platform.d.ts","sourceRoot":"","sources":["../../src/Platform.ts"],"names":[],"mappings":"AAAA,qBAAa,eAAe;IAC1B,OAAO,CAAC,WAAW,CAAc;IAEjC,KAAK,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,CAAC,EAAE,WAAW,GAAG,OAAO,CAAC,QAAQ,CAAC;IAI1D,GAAG,IAAI,MAAM;IAIb,MAAM,CAAC,IAAI,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM,GAAG,UAAU;IAUlD,MAAM,CAAC,KAAK,EAAE,UAAU,EAAE,QAAQ,EAAE,MAAM,GAAG,MAAM;CAGpD"}

build/src/Platform.js
@@ -0,0 +1,21 @@
export class PlatformBrowser {
fetch(path, init) {
return fetch(path, init);
}
now() {
return performance.now();
}
encode(text, encoding) {
if (encoding !== 'utf-8' && encoding !== 'utf8') {
throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
}
if (this.textEncoder == null) {
this.textEncoder = new TextEncoder();
}
return this.textEncoder.encode(text);
}
decode(bytes, encoding) {
return new TextDecoder(encoding).decode(bytes);
}
}
//# sourceMappingURL=Platform.js.map

@@ -0,0 +1 @@
{"version":3,"file":"Platform.js","sourceRoot":"","sources":["../../src/Platform.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,eAAe;IAG1B,KAAK,CAAC,IAAY,EAAE,IAAkB;QACpC,OAAO,KAAK,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC;IAC3B,CAAC;IAED,GAAG;QACD,OAAO,WAAW,CAAC,GAAG,EAAE,CAAC;IAC3B,CAAC;IAED,MAAM,CAAC,IAAY,EAAE,QAAgB;QACnC,IAAI,QAAQ,KAAK,OAAO,IAAI,QAAQ,KAAK,MAAM,EAAE;YAC/C,MAAM,IAAI,KAAK,CACX,kDAAkD,QAAQ,EAAE,CAAC,CAAC;SACnE;QACD,IAAI,IAAI,CAAC,WAAW,IAAI,IAAI,EAAE;YAC5B,IAAI,CAAC,WAAW,GAAG,IAAI,WAAW,EAAE,CAAC;SACtC;QACD,OAAO,IAAI,CAAC,WAAW,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC;IACvC,CAAC;IACD,MAAM,CAAC,KAAiB,EAAE,QAAgB;QACxC,OAAO,IAAI,WAAW,CAAC,QAAQ,CAAC,CAAC,MAAM,CAAC,KAAK,CAAC,CAAC;IACjD,CAAC;CACF"}

@@ -1,8 +1,8 @@
import * as tf from '../../dist/tfjs.esm.js';
import * as tf from '@tensorflow/tfjs';
import { TinyXception } from '../xception/TinyXception';
import { AgeAndGenderPrediction, NetOutput, NetParams } from './types';
import { NeuralNetwork } from '../NeuralNetwork';
import { NetInput, TNetInput } from '../dom/index';
import { NetInput, TNetInput } from '../dom';
export declare class AgeGenderNet extends NeuralNetwork<NetParams> {
private _faceFeatureExtractor;
constructor(faceFeatureExtractor?: TinyXception);
@@ -16,14 +16,15 @@ export declare class AgeGenderNet extends NeuralNetwork<NetParams> {
loadClassifierParams(weights: Float32Array): void;
extractClassifierParams(weights: Float32Array): {
params: NetParams;
paramMappings: import("../common/types.js").ParamMapping[];
paramMappings: import("../common").ParamMapping[];
};
protected extractParamsFromWeightMap(weightMap: tf.NamedTensorMap): {
protected extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): {
params: NetParams;
paramMappings: import("../common/types.js").ParamMapping[];
paramMappings: import("../common").ParamMapping[];
};
protected extractParams(weights: Float32Array): {
params: NetParams;
paramMappings: import("../common/types.js").ParamMapping[];
paramMappings: import("../common").ParamMapping[];
};
}
//# sourceMappingURL=AgeGenderNet.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"AgeGenderNet.d.ts","sourceRoot":"","sources":["../../../src/ageGenderNet/AgeGenderNet.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAIvC,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AAGxD,OAAO,EAAE,sBAAsB,EAAU,SAAS,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAC/E,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,QAAQ,EAAE,SAAS,EAAc,MAAM,QAAQ,CAAC;AAEzD,qBAAa,YAAa,SAAQ,aAAa,CAAC,SAAS,CAAC;IAExD,OAAO,CAAC,qBAAqB,CAAc;gBAE/B,oBAAoB,GAAE,YAAkC;IAKpE,IAAW,oBAAoB,IAAI,YAAY,CAE9C;IAEM,MAAM,CAAC,KAAK,EAAE,QAAQ,GAAG,EAAE,CAAC,QAAQ,GAAG,SAAS;IAoBhD,YAAY,CAAC,KAAK,EAAE,QAAQ,GAAG,EAAE,CAAC,QAAQ,GAAG,SAAS;IAOhD,OAAO,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,SAAS,CAAC;IAI7C,mBAAmB,CAAC,KAAK,EAAE,SAAS,GAAG,OAAO,CAAC,sBAAsB,GAAG,sBAAsB,EAAE,CAAC;IAgC9G,SAAS,CAAC,mBAAmB,IAAI,MAAM;IAIhC,OAAO,CAAC,gBAAgB,GAAE,OAAc;IAKxC,oBAAoB,CAAC,OAAO,EAAE,YAAY;IAM1C,uBAAuB,CAAC,OAAO,EAAE,YAAY;;;;IAIpD,SAAS,CAAC,0BAA0B,CAAC,SAAS,EAAE,EAAE,CAAC,cAAc;;;;IASjE,SAAS,CAAC,aAAa,CAAC,OAAO,EAAE,YAAY;;;;CAU9C"}

@@ -0,0 +1,95 @@
import * as tf from '@tensorflow/tfjs';
import { fullyConnectedLayer } from '../common/fullyConnectedLayer';
import { seperateWeightMaps } from '../faceProcessor/util';
import { TinyXception } from '../xception/TinyXception';
import { extractParams } from './extractParams';
import { extractParamsFromWeigthMap } from './extractParamsFromWeigthMap';
import { Gender } from './types';
import { NeuralNetwork } from '../NeuralNetwork';
import { NetInput, toNetInput } from '../dom';
export class AgeGenderNet extends NeuralNetwork {
constructor(faceFeatureExtractor = new TinyXception(2)) {
super('AgeGenderNet');
this._faceFeatureExtractor = faceFeatureExtractor;
}
get faceFeatureExtractor() {
return this._faceFeatureExtractor;
}
runNet(input) {
const { params } = this;
if (!params) {
throw new Error(`${this._name} - load model before inference`);
}
return tf.tidy(() => {
const bottleneckFeatures = input instanceof NetInput
? this.faceFeatureExtractor.forwardInput(input)
: input;
const pooled = tf.avgPool(bottleneckFeatures, [7, 7], [2, 2], 'valid').as2D(bottleneckFeatures.shape[0], -1);
const age = fullyConnectedLayer(pooled, params.fc.age).as1D();
const gender = fullyConnectedLayer(pooled, params.fc.gender);
return { age, gender };
});
}
forwardInput(input) {
return tf.tidy(() => {
const { age, gender } = this.runNet(input);
return { age, gender: tf.softmax(gender) };
});
}
async forward(input) {
return this.forwardInput(await toNetInput(input));
}
async predictAgeAndGender(input) {
const netInput = await toNetInput(input);
const out = await this.forwardInput(netInput);
const ages = tf.unstack(out.age);
const genders = tf.unstack(out.gender);
const ageAndGenderTensors = ages.map((ageTensor, i) => ({
ageTensor,
genderTensor: genders[i]
}));
const predictionsByBatch = await Promise.all(ageAndGenderTensors.map(async ({ ageTensor, genderTensor }) => {
const age = (await ageTensor.data())[0];
const probMale = (await genderTensor.data())[0];
const isMale = probMale > 0.5;
const gender = isMale ? Gender.MALE : Gender.FEMALE;
const genderProbability = isMale ? probMale : (1 - probMale);
ageTensor.dispose();
genderTensor.dispose();
return { age, gender, genderProbability };
}));
out.age.dispose();
out.gender.dispose();
return netInput.isBatchInput
? predictionsByBatch
: predictionsByBatch[0];
}
getDefaultModelName() {
return 'age_gender_model';
}
dispose(throwOnRedispose = true) {
this.faceFeatureExtractor.dispose(throwOnRedispose);
super.dispose(throwOnRedispose);
}
loadClassifierParams(weights) {
const { params, paramMappings } = this.extractClassifierParams(weights);
this._params = params;
this._paramMappings = paramMappings;
}
extractClassifierParams(weights) {
return extractParams(weights);
}
extractParamsFromWeigthMap(weightMap) {
const { featureExtractorMap, classifierMap } = seperateWeightMaps(weightMap);
this.faceFeatureExtractor.loadFromWeightMap(featureExtractorMap);
return extractParamsFromWeigthMap(classifierMap);
}
extractParams(weights) {
const classifierWeightSize = (512 * 1 + 1) + (512 * 2 + 2);
const featureExtractorWeights = weights.slice(0, weights.length - classifierWeightSize);
const classifierWeights = weights.slice(weights.length - classifierWeightSize);
this.faceFeatureExtractor.extractWeights(featureExtractorWeights);
return this.extractClassifierParams(classifierWeights);
}
}
//# sourceMappingURL=AgeGenderNet.js.map

@@ -0,0 +1 @@
{"version":3,"file":"AgeGenderNet.js","sourceRoot":"","sources":["../../../src/ageGenderNet/AgeGenderNet.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEvC,OAAO,EAAE,mBAAmB,EAAE,MAAM,+BAA+B,CAAC;AACpE,OAAO,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAC;AAC3D,OAAO,EAAE,YAAY,EAAE,MAAM,0BAA0B,CAAC;AACxD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,0BAA0B,EAAE,MAAM,8BAA8B,CAAC;AAC1E,OAAO,EAA0B,MAAM,EAAwB,MAAM,SAAS,CAAC;AAC/E,OAAO,EAAE,aAAa,EAAE,MAAM,kBAAkB,CAAC;AACjD,OAAO,EAAE,QAAQ,EAAa,UAAU,EAAE,MAAM,QAAQ,CAAC;AAEzD,MAAM,OAAO,YAAa,SAAQ,aAAwB;IAIxD,YAAY,uBAAqC,IAAI,YAAY,CAAC,CAAC,CAAC;QAClE,KAAK,CAAC,cAAc,CAAC,CAAA;QACrB,IAAI,CAAC,qBAAqB,GAAG,oBAAoB,CAAA;IACnD,CAAC;IAED,IAAW,oBAAoB;QAC7B,OAAO,IAAI,CAAC,qBAAqB,CAAA;IACnC,CAAC;IAEM,MAAM,CAAC,KAA6B;QAEzC,MAAM,EAAE,MAAM,EAAE,GAAG,IAAI,CAAA;QAEvB,IAAI,CAAC,MAAM,EAAE;YACX,MAAM,IAAI,KAAK,CAAC,GAAG,IAAI,CAAC,KAAK,gCAAgC,CAAC,CAAA;SAC/D;QAED,OAAO,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;YAClB,MAAM,kBAAkB,GAAG,KAAK,YAAY,QAAQ;gBAClD,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,YAAY,CAAC,KAAK,CAAC;gBAC/C,CAAC,CAAC,KAAK,CAAA;YAET,MAAM,MAAM,GAAG,EAAE,CAAC,OAAO,CAAC,kBAAkB,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,EAAE,OAAO,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;YAC5G,MAAM,GAAG,GAAG,mBAAmB,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,GAAG,CAAC,CAAC,IAAI,EAAE,CAAA;YAC7D,MAAM,MAAM,GAAG,mBAAmB,CAAC,MAAM,EAAE,MAAM,CAAC,EAAE,CAAC,MAAM,CAAC,CAAA;YAC5D,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,CAAA;QACxB,CAAC,CAAC,CAAA;IACJ,CAAC;IAEM,YAAY,CAAC,KAA6B;QAC/C,OAAO,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE;YAClB,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,GAAG,IAAI,CAAC,MAAM,CAAC,KAAK,CAAC,CAAA;YAC1C,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE,CAAC,OAAO,CAAC,MAAM,CAAC,EAAE,CAAA;QAC5C,CAAC,CAAC,CAAA;IACJ,CAAC;IAEM,KAAK,CAAC,OAAO,CAAC,KAAgB;QACnC,OAAO,IAAI,CAAC,YAAY,CAAC,MAAM,UAAU,CAAC,KAAK,CAAC,CAAC,CAAA;IACnD,CAAC;IAEM,KAAK,CAAC,mBAAmB,CAAC,KAAgB;QAC/C,MAAM,QAAQ,GAAG,MAAM,UAAU,CAAC,KAAK,CAAC,CAAA;QACxC,MAAM,GAAG,GAAG,MAAM,IAAI,CAAC,YAAY,CAAC,QAAQ,CAAC,CAAA;QAE7C,MAAM,IAAI,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,CAAA;QAChC,MAAM,OAAO,GAAG,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,MAAM,CAAC,CAAA;QACtC,MAAM,mBAAmB,GAAG,IAAI,CAAC,GAAG,CAAC,CAAC,SAAS,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC;YACtD,SAAS;YACT,YAAY,EAAE,OAAO,CAAC,CAAC,CAAC;SACzB,CAAC,CAAC,CAAA;QAEH,MAAM,kBAAkB,GAAG,MAAM,OAAO,CAAC,GAAG,CAC1C,mBAAmB,CAAC,GAAG,CAAC,KAAK,EAAE,EAAE,SAAS,EAAE,YAAY,EAAE,EAAE,EAAE;YAC5D,MAAM,GAAG,GAAG,CAAC,MAAM,SAAS,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YACvC,MAAM,QAAQ,GAAG,CAAC,MAAM,YAAY,CAAC,IAAI,EAAE,CAAC,CAAC,CAAC,CAAC,CAAA;YAC/C,MAAM,MAAM,GAAG,QAAQ,GAAG,GAAG,CAAA;YAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,CAAC,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,MAAM,CAAC,MAAM,CAAA;YACnD,MAAM,iBAAiB,GAAG,MAAM,CAAC,CAAC,CAAC,QAAQ,CAAC,CAAC,CAAC,CAAC,CAAC,GAAG,QAAQ,CAAC,CAAA;YAE5D,SAAS,CAAC,OAAO,EAAE,CAAA;YACnB,YAAY,CAAC,OAAO,EAAE,CAAA;YACtB,OAAO,EAAE,GAAG,EAAE,MAAM,EAAE,iBAAiB,EAAE,CAAA;QAC3C,CAAC,CAAC,CACH,CAAA;QACD,GAAG,CAAC,GAAG,CAAC,OAAO,EAAE,CAAA;QACjB,GAAG,CAAC,MAAM,CAAC,OAAO,EAAE,CAAA;QAEpB,OAAO,QAAQ,CAAC,YAAY;YAC1B,CAAC,CAAC,kBAAkB;YACpB,CAAC,CAAC,kBAAkB,CAAC,CAAC,CAAC,CAAA;IAC3B,CAAC;IAES,mBAAmB;QAC3B,OAAO,kBAAkB,CAAA;IAC3B,CAAC;IAEM,OAAO,CAAC,mBAA4B,IAAI;QAC7C,IAAI,CAAC,oBAAoB,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;QACnD,KAAK,CAAC,OAAO,CAAC,gBAAgB,CAAC,CAAA;IACjC,CAAC;IAEM,oBAAoB,CAAC,OAAqB;QAC/C,MAAM,EAAE,MAAM,EAAE,aAAa,EAAE,GAAG,IAAI,CAAC,uBAAuB,CAAC,OAAO,CAAC,CAAA;QACvE,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;QACrB,IAAI,CAAC,cAAc,GAAG,aAAa,CAAA;IACrC,CAAC;IAEM,uBAAuB,CAAC,OAAqB;QAClD,OAAO,aAAa,CAAC,OAAO,CAAC,CAAA;IAC/B,CAAC;IAES,0BAA0B,CAAC,SA
A4B;QAE/D,MAAM,EAAE,mBAAmB,EAAE,aAAa,EAAE,GAAG,kBAAkB,CAAC,SAAS,CAAC,CAAA;QAE5E,IAAI,CAAC,oBAAoB,CAAC,iBAAiB,CAAC,mBAAmB,CAAC,CAAA;QAEhE,OAAO,0BAA0B,CAAC,aAAa,CAAC,CAAA;IAClD,CAAC;IAES,aAAa,CAAC,OAAqB;QAE3C,MAAM,oBAAoB,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,GAAG,CAAC,GAAG,GAAG,CAAC,GAAG,CAAC,CAAC,CAAA;QAE1D,MAAM,uBAAuB,GAAG,OAAO,CAAC,KAAK,CAAC,CAAC,EAAE,OAAO,CAAC,MAAM,GAAG,oBAAoB,CAAC,CAAA;QACvF,MAAM,iBAAiB,GAAG,OAAO,CAAC,KAAK,CAAC,OAAO,CAAC,MAAM,GAAG,oBAAoB,CAAC,CAAA;QAE9E,IAAI,CAAC,oBAAoB,CAAC,cAAc,CAAC,uBAAuB,CAAC,CAAA;QACjE,OAAO,IAAI,CAAC,uBAAuB,CAAC,iBAAiB,CAAC,CAAA;IACxD,CAAC;CACF"}

@@ -1,6 +1,7 @@
import { ParamMapping } from '../common/index';
import { ParamMapping } from '../common';
import { NetParams } from './types';
export declare function extractParams(weights: Float32Array): {
params: NetParams;
paramMappings: ParamMapping[];
};
//# sourceMappingURL=extractParams.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"extractParams.d.ts","sourceRoot":"","sources":["../../../src/ageGenderNet/extractParams.ts"],"names":[],"mappings":"AAAA,OAAO,EAAiD,YAAY,EAAE,MAAM,WAAW,CAAC;AACxF,OAAO,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAEpC,wBAAgB,aAAa,CAAC,OAAO,EAAE,YAAY,GAAG;IAAE,MAAM,EAAE,SAAS,CAAC;IAAC,aAAa,EAAE,YAAY,EAAE,CAAA;CAAE,CAsBzG"}

@@ -0,0 +1,16 @@
import { extractFCParamsFactory, extractWeightsFactory } from '../common';
export function extractParams(weights) {
const paramMappings = [];
const { extractWeights, getRemainingWeights } = extractWeightsFactory(weights);
const extractFCParams = extractFCParamsFactory(extractWeights, paramMappings);
const age = extractFCParams(512, 1, 'fc/age');
const gender = extractFCParams(512, 2, 'fc/gender');
if (getRemainingWeights().length !== 0) {
throw new Error(`weights remaining after extract: ${getRemainingWeights().length}`);
}
return {
paramMappings,
params: { fc: { age, gender } }
};
}
//# sourceMappingURL=extractParams.js.map

@@ -0,0 +1 @@
{"version":3,"file":"extractParams.js","sourceRoot":"","sources":["../../../src/ageGenderNet/extractParams.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,sBAAsB,EAAE,qBAAqB,EAAgB,MAAM,WAAW,CAAC;AAGxF,MAAM,UAAU,aAAa,CAAC,OAAqB;IAEjD,MAAM,aAAa,GAAmB,EAAE,CAAA;IAExC,MAAM,EACJ,cAAc,EACd,mBAAmB,EACpB,GAAG,qBAAqB,CAAC,OAAO,CAAC,CAAA;IAElC,MAAM,eAAe,GAAG,sBAAsB,CAAC,cAAc,EAAE,aAAa,CAAC,CAAA;IAE7E,MAAM,GAAG,GAAG,eAAe,CAAC,GAAG,EAAE,CAAC,EAAE,QAAQ,CAAC,CAAA;IAC7C,MAAM,MAAM,GAAG,eAAe,CAAC,GAAG,EAAE,CAAC,EAAE,WAAW,CAAC,CAAA;IAEnD,IAAI,mBAAmB,EAAE,CAAC,MAAM,KAAK,CAAC,EAAE;QACtC,MAAM,IAAI,KAAK,CAAC,kCAAkC,mBAAmB,EAAE,CAAC,MAAM,EAAE,CAAC,CAAA;KAClF;IAED,OAAO;QACL,aAAa;QACb,MAAM,EAAE,EAAE,EAAE,EAAE,EAAE,GAAG,EAAE,MAAM,EAAE,EAAE;KAChC,CAAA;AACH,CAAC"}

@@ -0,0 +1,8 @@
import * as tf from '@tensorflow/tfjs';
import { ParamMapping } from '../common';
import { NetParams } from './types';
export declare function extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): {
params: NetParams;
paramMappings: ParamMapping[];
};
//# sourceMappingURL=extractParamsFromWeigthMap.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"extractParamsFromWeigthMap.d.ts","sourceRoot":"","sources":["../../../src/ageGenderNet/extractParamsFromWeigthMap.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEvC,OAAO,EAAmE,YAAY,EAAE,MAAM,WAAW,CAAC;AAC1G,OAAO,EAAE,SAAS,EAAE,MAAM,SAAS,CAAC;AAEpC,wBAAgB,0BAA0B,CACxC,SAAS,EAAE,EAAE,CAAC,cAAc,GAC3B;IAAE,MAAM,EAAE,SAAS,CAAC;IAAC,aAAa,EAAE,YAAY,EAAE,CAAA;CAAE,CAsBtD"}

@@ -0,0 +1,19 @@
import { disposeUnusedWeightTensors, extractWeightEntryFactory } from '../common';
export function extractParamsFromWeigthMap(weightMap) {
const paramMappings = [];
const extractWeightEntry = extractWeightEntryFactory(weightMap, paramMappings);
function extractFcParams(prefix) {
const weights = extractWeightEntry(`${prefix}/weights`, 2);
const bias = extractWeightEntry(`${prefix}/bias`, 1);
return { weights, bias };
}
const params = {
fc: {
age: extractFcParams('fc/age'),
gender: extractFcParams('fc/gender')
}
};
disposeUnusedWeightTensors(weightMap, paramMappings);
return { params, paramMappings };
}
//# sourceMappingURL=extractParamsFromWeigthMap.js.map

@@ -0,0 +1 @@
{"version":3,"file":"extractParamsFromWeigthMap.js","sourceRoot":"","sources":["../../../src/ageGenderNet/extractParamsFromWeigthMap.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,0BAA0B,EAAE,yBAAyB,EAA0B,MAAM,WAAW,CAAC;AAG1G,MAAM,UAAU,0BAA0B,CACxC,SAA4B;IAG5B,MAAM,aAAa,GAAmB,EAAE,CAAA;IAExC,MAAM,kBAAkB,GAAG,yBAAyB,CAAC,SAAS,EAAE,aAAa,CAAC,CAAA;IAE9E,SAAS,eAAe,CAAC,MAAc;QACrC,MAAM,OAAO,GAAG,kBAAkB,CAAc,GAAG,MAAM,UAAU,EAAE,CAAC,CAAC,CAAA;QACvE,MAAM,IAAI,GAAG,kBAAkB,CAAc,GAAG,MAAM,OAAO,EAAE,CAAC,CAAC,CAAA;QACjE,OAAO,EAAE,OAAO,EAAE,IAAI,EAAE,CAAA;IAC1B,CAAC;IAED,MAAM,MAAM,GAAG;QACb,EAAE,EAAE;YACF,GAAG,EAAE,eAAe,CAAC,QAAQ,CAAC;YAC9B,MAAM,EAAE,eAAe,CAAC,WAAW,CAAC;SACrC;KACF,CAAA;IAED,0BAA0B,CAAC,SAAS,EAAE,aAAa,CAAC,CAAA;IAEpD,OAAO,EAAE,MAAM,EAAE,aAAa,EAAE,CAAA;AAClC,CAAC"}

build/src/ageGenderNet/index.d.ts
@@ -0,0 +1,3 @@
export * from './AgeGenderNet';
export * from './types';
//# sourceMappingURL=index.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/ageGenderNet/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,SAAS,CAAC"}

@@ -1,2 +1,3 @@
export * from './AgeGenderNet';
export * from './types';
//# sourceMappingURL=index.js.map

@@ -0,0 +1 @@
{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/ageGenderNet/index.ts"],"names":[],"mappings":"AAAA,cAAc,gBAAgB,CAAC;AAC/B,cAAc,SAAS,CAAC"}

@@ -1,21 +1,22 @@
import * as tf from '../../dist/tfjs.esm';
import { FCParams } from '../common/index';
export declare enum Gender {
FEMALE = "female",
MALE = "male"
}
export type AgeAndGenderPrediction = {
import * as tf from '@tensorflow/tfjs';
import { FCParams } from '../common';
export declare type AgeAndGenderPrediction = {
age: number;
gender: Gender;
genderProbability: number;
};
export type NetOutput = {
export declare enum Gender {
FEMALE = "female",
MALE = "male"
}
export declare type NetOutput = {
age: tf.Tensor1D;
gender: tf.Tensor2D;
};
export type NetParams = {
export declare type NetParams = {
fc: {
age: FCParams;
gender: FCParams;
};
};
//# sourceMappingURL=types.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"types.d.ts","sourceRoot":"","sources":["../../../src/ageGenderNet/types.ts"],"names":[],"mappings":"AAAA,OAAO,KAAK,EAAE,MAAM,kBAAkB,CAAC;AAEvC,OAAO,EAAE,QAAQ,EAAE,MAAM,WAAW,CAAC;AAErC,oBAAY,sBAAsB,GAAG;IACnC,GAAG,EAAE,MAAM,CAAA;IACX,MAAM,EAAE,MAAM,CAAA;IACd,iBAAiB,EAAE,MAAM,CAAA;CAC1B,CAAA;AAED,oBAAY,MAAM;IAChB,MAAM,WAAW;IACjB,IAAI,SAAS;CACd;AAED,oBAAY,SAAS,GAAG;IAAE,GAAG,EAAE,EAAE,CAAC,QAAQ,CAAC;IAAC,MAAM,EAAE,EAAE,CAAC,QAAQ,CAAA;CAAE,CAAA;AAEjE,oBAAY,SAAS,GAAG;IACtB,EAAE,EAAE;QACF,GAAG,EAAE,QAAQ,CAAA;QACb,MAAM,EAAE,QAAQ,CAAA;KACjB,CAAA;CACF,CAAA"}

@@ -0,0 +1,6 @@
export var Gender;
(function (Gender) {
Gender["FEMALE"] = "female";
Gender["MALE"] = "male";
})(Gender || (Gender = {}));
//# sourceMappingURL=types.js.map

@@ -0,0 +1 @@
{"version":3,"file":"types.js","sourceRoot":"","sources":["../../../src/ageGenderNet/types.ts"],"names":[],"mappings":"AAUA,MAAM,CAAN,IAAY,MAGX;AAHD,WAAY,MAAM;IAChB,2BAAiB,CAAA;IACjB,uBAAa,CAAA;AACf,CAAC,EAHW,MAAM,KAAN,MAAM,QAGjB"}

@@ -5,6 +5,7 @@ export interface IBoundingBox {
right: number;
bottom: number;
}
export declare class BoundingBox extends Box implements IBoundingBox {
export declare class BoundingBox extends Box<BoundingBox> implements IBoundingBox {
constructor(left: number, top: number, right: number, bottom: number, allowNegativeDimensions?: boolean);
}
//# sourceMappingURL=BoundingBox.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"BoundingBox.d.ts","sourceRoot":"","sources":["../../../src/classes/BoundingBox.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAE5B,MAAM,WAAW,YAAY;IAC3B,IAAI,EAAE,MAAM,CAAA;IACZ,GAAG,EAAE,MAAM,CAAA;IACX,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf;AAED,qBAAa,WAAY,SAAQ,GAAG,CAAC,WAAW,CAAE,YAAW,YAAY;gBAC3D,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,EAAE,uBAAuB,GAAE,OAAe;CAG/G"}

@@ -0,0 +1,7 @@
import { Box } from './Box';
export class BoundingBox extends Box {
constructor(left, top, right, bottom, allowNegativeDimensions = false) {
super({ left, top, right, bottom }, allowNegativeDimensions);
}
}
//# sourceMappingURL=BoundingBox.js.map

@@ -0,0 +1 @@
{"version":3,"file":"BoundingBox.js","sourceRoot":"","sources":["../../../src/classes/BoundingBox.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAS5B,MAAM,OAAO,WAAY,SAAQ,GAAgB;IAC/C,YAAY,IAAY,EAAE,GAAW,EAAE,KAAa,EAAE,MAAc,EAAE,0BAAmC,KAAK;QAC5G,KAAK,CAAC,EAAE,IAAI,EAAE,GAAG,EAAE,KAAK,EAAE,MAAM,EAAE,EAAE,uBAAuB,CAAC,CAAA;IAC9D,CAAC;CACF"}

@@ -44,3 +44,4 @@ export declare class Box<BoxType = any> implements IBoundingBox, IRect {
};
calibrate(region: Box): Box<any>;
}
//# sourceMappingURL=Box.d.ts.map

@@ -0,0 +1 @@
{"version":3,"file":"Box.d.ts","sourceRoot":"","sources":["../../../src/classes/Box.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,KAAK,EAAE,MAAM,QAAQ,CAAC;AAE/B,qBAAa,GAAG,CAAC,OAAO,GAAG,GAAG,CAAE,YAAW,YAAY,EAAE,KAAK;WAE9C,MAAM,CAAC,IAAI,EAAE,GAAG,GAAG,OAAO;WAI1B,gBAAgB,CAAC,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM,EAAE,uBAAuB,GAAE,OAAe;IAUjG,OAAO,CAAC,EAAE,CAAQ;IAClB,OAAO,CAAC,EAAE,CAAQ;IAClB,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,OAAO,CAAQ;gBAEX,IAAI,EAAE,YAAY,GAAG,KAAK,EAAE,uBAAuB,GAAE,OAAc;IAsB/E,IAAW,CAAC,IAAI,MAAM,CAAmB;IACzC,IAAW,CAAC,IAAI,MAAM,CAAmB;IACzC,IAAW,KAAK,IAAI,MAAM,CAAuB;IACjD,IAAW,MAAM,IAAI,MAAM,CAAwB;IACnD,IAAW,IAAI,IAAI,MAAM,CAAkB;IAC3C,IAAW,GAAG,IAAI,MAAM,CAAkB;IAC1C,IAAW,KAAK,IAAI,MAAM,CAA+B;IACzD,IAAW,MAAM,IAAI,MAAM,CAAgC;IAC3D,IAAW,IAAI,IAAI,MAAM,CAAoC;IAC7D,IAAW,OAAO,IAAI,KAAK,CAA0C;IACrE,IAAW,QAAQ,IAAI,KAAK,CAA2C;IACvE,IAAW,UAAU,IAAI,KAAK,CAA6C;IAC3E,IAAW,WAAW,IAAI,KAAK,CAA8C;IAEtE,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC;IAMrB,KAAK,IAAI,GAAG,CAAC,OAAO,CAAC;IAMrB,QAAQ,IAAI,GAAG,CAAC,OAAO,CAAC;IAexB,OAAO,CAAC,CAAC,EAAE,WAAW,GAAG,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC;IAW9C,GAAG,CAAC,IAAI,EAAE,MAAM,EAAE,IAAI,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC;IAU7C,kBAAkB,CAAC,QAAQ,EAAE,MAAM,EAAE,SAAS,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC;IAarE,KAAK,CAAC,EAAE,EAAE,MAAM,EAAE,EAAE,EAAE,MAAM,GAAG,GAAG,CAAC,OAAO,CAAC;IAQ3C,YAAY,CAAC,WAAW,EAAE,MAAM,EAAE,UAAU,EAAE,MAAM;;;;;;;;;;;;IAkCpD,SAAS,CAAC,MAAM,EAAE,GAAG;CAQ7B"}

build/src/classes/Box.js (140 lines) Normal file
View File

@ -0,0 +1,140 @@
import { isDimensions, isValidNumber } from '../utils';
import { Point } from './Point';
export class Box {
constructor(_box, allowNegativeDimensions = true) {
const box = (_box || {});
const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber);
const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber);
if (!isRect && !isBbox) {
throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`);
}
const [x, y, width, height] = isRect
? [box.x, box.y, box.width, box.height]
: [box.left, box.top, box.right - box.left, box.bottom - box.top];
Box.assertIsValidBox({ x, y, width, height }, 'Box.constructor', allowNegativeDimensions);
this._x = x;
this._y = y;
this._width = width;
this._height = height;
}
static isRect(rect) {
return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber);
}
static assertIsValidBox(box, callee, allowNegativeDimensions = false) {
if (!Box.isRect(box)) {
throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`);
}
if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) {
throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`);
}
}
get x() { return this._x; }
get y() { return this._y; }
get width() { return this._width; }
get height() { return this._height; }
get left() { return this.x; }
get top() { return this.y; }
get right() { return this.x + this.width; }
get bottom() { return this.y + this.height; }
get area() { return this.width * this.height; }
get topLeft() { return new Point(this.left, this.top); }
get topRight() { return new Point(this.right, this.top); }
get bottomLeft() { return new Point(this.left, this.bottom); }
get bottomRight() { return new Point(this.right, this.bottom); }
round() {
const [x, y, width, height] = [this.x, this.y, this.width, this.height]
.map(val => Math.round(val));
return new Box({ x, y, width, height });
}
floor() {
const [x, y, width, height] = [this.x, this.y, this.width, this.height]
.map(val => Math.floor(val));
return new Box({ x, y, width, height });
}
toSquare() {
let { x, y, width, height } = this;
const diff = Math.abs(width - height);
if (width < height) {
x -= (diff / 2);
width += diff;
}
if (height < width) {
y -= (diff / 2);
height += diff;
}
return new Box({ x, y, width, height });
}
rescale(s) {
const scaleX = isDimensions(s) ? s.width : s;
const scaleY = isDimensions(s) ? s.height : s;
return new Box({
x: this.x * scaleX,
y: this.y * scaleY,
width: this.width * scaleX,
height: this.height * scaleY
});
}
pad(padX, padY) {
let [x, y, width, height] = [
this.x - (padX / 2),
this.y - (padY / 2),
this.width + padX,
this.height + padY
];
return new Box({ x, y, width, height });
}
clipAtImageBorders(imgWidth, imgHeight) {
const { x, y, right, bottom } = this;
const clippedX = Math.max(x, 0);
const clippedY = Math.max(y, 0);
const newWidth = right - clippedX;
const newHeight = bottom - clippedY;
const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
return (new Box({ x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight })).floor();
}
shift(sx, sy) {
const { width, height } = this;
const x = this.x + sx;
const y = this.y + sy;
return new Box({ x, y, width, height });
}
padAtBorders(imageHeight, imageWidth) {
const w = this.width + 1;
const h = this.height + 1;
let dx = 1;
let dy = 1;
let edx = w;
let edy = h;
let x = this.left;
let y = this.top;
let ex = this.right;
let ey = this.bottom;
if (ex > imageWidth) {
edx = -ex + imageWidth + w;
ex = imageWidth;
}
if (ey > imageHeight) {
edy = -ey + imageHeight + h;
ey = imageHeight;
}
if (x < 1) {
dx = 2 - x; // shift the destination x start by the amount clipped at the left edge
x = 1;
}
if (y < 1) {
dy = 2 - y; // shift the destination y start by the amount clipped at the top edge
y = 1;
}
return { dy, edy, dx, edx, y, ey, x, ex, w, h };
}
calibrate(region) {
return new Box({
left: this.left + (region.left * this.width),
top: this.top + (region.top * this.height),
right: this.right + (region.right * this.width),
bottom: this.bottom + (region.bottom * this.height)
}).toSquare().round();
}
}
//# sourceMappingURL=Box.js.map
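
A short usage sketch of the Box API compiled above (illustrative values, not part of the commit). The constructor accepts either a rect-style object ({ x, y, width, height }) or an edge-style object ({ left, top, right, bottom }) and normalizes both to x/y/width/height:

import { Box } from './Box';

const fromRect = new Box({ x: 10, y: 10, width: 100, height: 50 });
const fromEdges = new Box({ left: 10, top: 10, right: 110, bottom: 60 }); // same geometry

// Every helper returns a new Box; instances are never mutated.
const square = fromRect.pad(20, 20).toSquare();      // pad both axes, then equalize width/height
const onImage = square.clipAtImageBorders(640, 480); // clamp to the image bounds, then floor()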

File diff suppressed because one or more lines are too long

View File

@ -10,3 +10,4 @@ export declare class Dimensions implements IDimensions {
get height(): number;
reverse(): Dimensions;
}
//# sourceMappingURL=Dimensions.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"Dimensions.d.ts","sourceRoot":"","sources":["../../../src/classes/Dimensions.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,WAAW;IAC1B,KAAK,EAAE,MAAM,CAAA;IACb,MAAM,EAAE,MAAM,CAAA;CACf;AAED,qBAAa,UAAW,YAAW,WAAW;IAE5C,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,OAAO,CAAQ;gBAEX,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM;IASzC,IAAW,KAAK,IAAI,MAAM,CAAuB;IACjD,IAAW,MAAM,IAAI,MAAM,CAAwB;IAE5C,OAAO,IAAI,UAAU;CAG7B"}

View File

@ -0,0 +1,16 @@
import { isValidNumber } from '../utils';
export class Dimensions {
constructor(width, height) {
if (!isValidNumber(width) || !isValidNumber(height)) {
throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({ width, height })}`);
}
this._width = width;
this._height = height;
}
get width() { return this._width; }
get height() { return this._height; }
reverse() {
return new Dimensions(1 / this.width, 1 / this.height);
}
}
//# sourceMappingURL=Dimensions.js.map
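
Dimensions.reverse() returns the reciprocal of each dimension, which is exactly what Box.rescale needs to convert between absolute pixel coordinates and relative [0, 1] coordinates. A sketch (values invented):

import { Box } from './Box';
import { Dimensions } from './Dimensions';

const dims = new Dimensions(640, 480);
const absolute = new Box({ x: 64, y: 48, width: 320, height: 240 });
const relative = absolute.rescale(dims.reverse()); // x: 0.1, y: 0.1, width: 0.5, height: 0.5
const restored = relative.rescale(dims);           // back to pixel coordinates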

View File

@ -0,0 +1 @@
{"version":3,"file":"Dimensions.js","sourceRoot":"","sources":["../../../src/classes/Dimensions.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAOzC,MAAM,OAAO,UAAU;IAKrB,YAAY,KAAa,EAAE,MAAc;QACvC,IAAI,CAAC,aAAa,CAAC,KAAK,CAAC,IAAI,CAAC,aAAa,CAAC,MAAM,CAAC,EAAE;YACnD,MAAM,IAAI,KAAK,CAAC,wFAAwF,IAAI,CAAC,SAAS,CAAC,EAAE,KAAK,EAAE,MAAM,EAAE,CAAC,EAAE,CAAC,CAAA;SAC7I;QAED,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,OAAO,GAAG,MAAM,CAAA;IACvB,CAAC;IAED,IAAW,KAAK,KAAa,OAAO,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;IACjD,IAAW,MAAM,KAAa,OAAO,IAAI,CAAC,OAAO,CAAA,CAAC,CAAC;IAE5C,OAAO;QACZ,OAAO,IAAI,UAAU,CAAC,CAAC,GAAG,IAAI,CAAC,KAAK,EAAE,CAAC,GAAG,IAAI,CAAC,MAAM,CAAC,CAAA;IACxD,CAAC;CACF"}

View File

@ -10,3 +10,4 @@ export declare class FaceDetection extends ObjectDetection implements IFaceDetec
constructor(score: number, relativeBox: Rect, imageDims: IDimensions);
forSize(width: number, height: number): FaceDetection;
}
//# sourceMappingURL=FaceDetection.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceDetection.d.ts","sourceRoot":"","sources":["../../../src/classes/FaceDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AAC3C,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AACpD,OAAO,EAAE,IAAI,EAAE,MAAM,QAAQ,CAAC;AAE9B,MAAM,WAAW,aAAa;IAC5B,KAAK,EAAE,MAAM,CAAA;IACb,GAAG,EAAE,GAAG,CAAA;CACT;AAED,qBAAa,aAAc,SAAQ,eAAgB,YAAW,aAAa;gBAEvE,KAAK,EAAE,MAAM,EACb,WAAW,EAAE,IAAI,EACjB,SAAS,EAAE,WAAW;IAKjB,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,aAAa;CAI7D"}

View File

@ -0,0 +1,11 @@
import { ObjectDetection } from './ObjectDetection';
export class FaceDetection extends ObjectDetection {
constructor(score, relativeBox, imageDims) {
super(score, score, '', relativeBox, imageDims);
}
forSize(width, height) {
const { score, relativeBox, imageDims } = super.forSize(width, height);
return new FaceDetection(score, relativeBox, imageDims);
}
}
//# sourceMappingURL=FaceDetection.js.map
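
FaceDetection is a thin specialization of ObjectDetection: the class score is set to the detection score and the class name to an empty string. The box passed in is relative to the source image, and forSize re-projects it onto new dimensions. Sketch (values invented; Rect is the library's x/y/width/height box class referenced in the typings above):

import { FaceDetection } from './FaceDetection';
import { Rect } from './Rect';

const det = new FaceDetection(0.98, new Rect(0.25, 0.25, 0.5, 0.5), { width: 640, height: 480 });
det.box;                    // absolute: 320x240 at (160, 120)
det.forSize(1280, 960).box; // the same relative box projected onto the larger image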

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceDetection.js","sourceRoot":"","sources":["../../../src/classes/FaceDetection.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAC;AAQpD,MAAM,OAAO,aAAc,SAAQ,eAAe;IAChD,YACE,KAAa,EACb,WAAiB,EACjB,SAAsB;QAEtB,KAAK,CAAC,KAAK,EAAE,KAAK,EAAE,EAAE,EAAE,WAAW,EAAE,SAAS,CAAC,CAAA;IACjD,CAAC;IAEM,OAAO,CAAC,KAAa,EAAE,MAAc;QAC1C,MAAM,EAAE,KAAK,EAAE,WAAW,EAAE,SAAS,EAAE,GAAG,KAAK,CAAC,OAAO,CAAC,KAAK,EAAE,MAAM,CAAC,CAAA;QACtE,OAAO,IAAI,aAAa,CAAC,KAAK,EAAE,WAAW,EAAE,SAAS,CAAC,CAAA;IACzD,CAAC;CACF"}

View File

@ -40,3 +40,4 @@ export declare class FaceLandmarks implements IFaceLandmarks {
private alignMinBbox;
protected getRefPointsForAlignment(): Point[];
}
//# sourceMappingURL=FaceLandmarks.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks.d.ts","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAE,KAAK,EAAQ,MAAM,QAAQ,CAAC;AAOrC,MAAM,WAAW,cAAc;IAC7B,SAAS,EAAE,KAAK,EAAE,CAAA;IAClB,KAAK,EAAE,KAAK,CAAA;CACb;AAED,qBAAa,aAAc,YAAW,cAAc;IAClD,SAAS,CAAC,MAAM,EAAE,KAAK,CAAA;IACvB,SAAS,CAAC,UAAU,EAAE,KAAK,EAAE,CAAA;IAC7B,SAAS,CAAC,QAAQ,EAAE,UAAU,CAAA;gBAG5B,6BAA6B,EAAE,KAAK,EAAE,EACtC,OAAO,EAAE,WAAW,EACpB,KAAK,GAAE,KAAuB;IAUhC,IAAW,KAAK,IAAI,KAAK,CAAmD;IAC5E,IAAW,UAAU,IAAI,MAAM,CAA+B;IAC9D,IAAW,WAAW,IAAI,MAAM,CAAgC;IAChE,IAAW,SAAS,IAAI,KAAK,EAAE,CAA2B;IAC1D,IAAW,iBAAiB,IAAI,KAAK,EAAE,CAItC;IAEM,OAAO,CAAC,CAAC,SAAS,aAAa,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,CAAC;IAOlE,OAAO,CAAC,CAAC,SAAS,aAAa,EAAE,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM,GAAG,CAAC;IAQzD,YAAY,CAAC,CAAC,SAAS,aAAa,EAAE,EAAE,EAAE,KAAK,GAAG,CAAC;IAI1D;;;;;;;;;;OAUG;IACI,KAAK,CACV,SAAS,CAAC,EAAE,aAAa,GAAG,KAAK,GAAG,YAAY,GAAG,IAAI,EACvD,OAAO,GAAE;QAAE,gBAAgB,CAAC,EAAE,OAAO,CAAC;QAAC,aAAa,CAAC,EAAE,MAAM,CAAA;KAAQ,GACpE,GAAG;IAkBN,OAAO,CAAC,SAAS;IAkBjB,OAAO,CAAC,YAAY;IAKpB,SAAS,CAAC,wBAAwB,IAAI,KAAK,EAAE;CAG9C"}

View File

@ -0,0 +1,79 @@
import { minBbox } from '../ops';
import { getCenterPoint } from '../utils';
import { Box } from './Box';
import { Dimensions } from './Dimensions';
import { FaceDetection } from './FaceDetection';
import { Point } from './Point';
import { Rect } from './Rect';
// face alignment constants
const relX = 0.5;
const relY = 0.43;
const relScale = 0.45;
export class FaceLandmarks {
constructor(relativeFaceLandmarkPositions, imgDims, shift = new Point(0, 0)) {
const { width, height } = imgDims;
this._imgDims = new Dimensions(width, height);
this._shift = shift;
this._positions = relativeFaceLandmarkPositions.map(pt => pt.mul(new Point(width, height)).add(shift));
}
get shift() { return new Point(this._shift.x, this._shift.y); }
get imageWidth() { return this._imgDims.width; }
get imageHeight() { return this._imgDims.height; }
get positions() { return this._positions; }
get relativePositions() {
return this._positions.map(pt => pt.sub(this._shift).div(new Point(this.imageWidth, this.imageHeight)));
}
forSize(width, height) {
return new this.constructor(this.relativePositions, { width, height });
}
shiftBy(x, y) {
return new this.constructor(this.relativePositions, this._imgDims, new Point(x, y));
}
shiftByPoint(pt) {
return this.shiftBy(pt.x, pt.y);
}
/**
* Aligns the face landmarks after face detection from the relative positions of the face's
* bounding box, or from its current shift. This function should be used to align the face images
* after face detection has been performed, before they are passed to the face recognition net.
* This will make the computed face descriptor more accurate.
*
* @param detection (optional) The bounding box of the face or the face detection result. If
* no argument is passed, the positions of the face landmarks are assumed to be relative to
* its current shift.
* @returns The bounding box of the aligned face.
*/
align(detection, options = {}) {
if (detection) {
const box = detection instanceof FaceDetection
? detection.box.floor()
: new Box(detection);
return this.shiftBy(box.x, box.y).align(null, options);
}
const { useDlibAlignment, minBoxPadding } = Object.assign({}, { useDlibAlignment: false, minBoxPadding: 0.2 }, options);
if (useDlibAlignment) {
return this.alignDlib();
}
return this.alignMinBbox(minBoxPadding);
}
alignDlib() {
const centers = this.getRefPointsForAlignment();
const [leftEyeCenter, rightEyeCenter, mouthCenter] = centers;
const distToMouth = (pt) => mouthCenter.sub(pt).magnitude();
const eyeToMouthDist = (distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2;
const size = Math.floor(eyeToMouthDist / relScale);
const refPoint = getCenterPoint(centers);
// TODO: pad in case rectangle is out of image bounds
const x = Math.floor(Math.max(0, refPoint.x - (relX * size)));
const y = Math.floor(Math.max(0, refPoint.y - (relY * size)));
return new Rect(x, y, Math.min(size, this.imageWidth + x), Math.min(size, this.imageHeight + y));
}
alignMinBbox(padding) {
const box = minBbox(this.positions);
return box.pad(box.width * padding, box.height * padding);
}
getRefPointsForAlignment() {
throw new Error('getRefPointsForAlignment not implemented by base class');
}
}
//# sourceMappingURL=FaceLandmarks.js.map
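
The align() method above supports two strategies: the default minimum-bounding-box alignment with configurable padding, and a dlib-style alignment built from eye and mouth reference points. A runnable sketch using the FaceLandmarks68 subclass (its compiled file appears a few hunks below; it implements getRefPointsForAlignment, and the random points stand in for real detector output):

import { FaceLandmarks68 } from './FaceLandmarks68';
import { Point } from './Point';

const pts = Array.from({ length: 68 }, () => new Point(Math.random(), Math.random()));
const landmarks = new FaceLandmarks68(pts, { width: 640, height: 480 });

const box1 = landmarks.align();                                 // minBbox with the default 20% padding
const box2 = landmarks.align(null, { minBoxPadding: 0 });       // tight minimum bounding box
const box3 = landmarks.align(null, { useDlibAlignment: true }); // eye/mouth heuristic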

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks.js","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,QAAQ,CAAC;AACjC,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAE1C,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,UAAU,EAAe,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAChC,OAAO,EAAS,IAAI,EAAE,MAAM,QAAQ,CAAC;AAErC,2BAA2B;AAC3B,MAAM,IAAI,GAAG,GAAG,CAAA;AAChB,MAAM,IAAI,GAAG,IAAI,CAAA;AACjB,MAAM,QAAQ,GAAG,IAAI,CAAA;AAOrB,MAAM,OAAO,aAAa;IAKxB,YACE,6BAAsC,EACtC,OAAoB,EACpB,QAAe,IAAI,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC;QAE9B,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,OAAO,CAAA;QACjC,IAAI,CAAC,QAAQ,GAAG,IAAI,UAAU,CAAC,KAAK,EAAE,MAAM,CAAC,CAAA;QAC7C,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,UAAU,GAAG,6BAA6B,CAAC,GAAG,CACjD,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,KAAK,EAAE,MAAM,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAClD,CAAA;IACH,CAAC;IAED,IAAW,KAAK,KAAY,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,CAAA,CAAC,CAAC;IAC5E,IAAW,UAAU,KAAa,OAAO,IAAI,CAAC,QAAQ,CAAC,KAAK,CAAA,CAAC,CAAC;IAC9D,IAAW,WAAW,KAAa,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAA,CAAC,CAAC;IAChE,IAAW,SAAS,KAAc,OAAO,IAAI,CAAC,UAAU,CAAA,CAAC,CAAC;IAC1D,IAAW,iBAAiB;QAC1B,OAAO,IAAI,CAAC,UAAU,CAAC,GAAG,CACxB,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,GAAG,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,GAAG,CAAC,IAAI,KAAK,CAAC,IAAI,CAAC,UAAU,EAAE,IAAI,CAAC,WAAW,CAAC,CAAC,CAC5E,CAAA;IACH,CAAC;IAEM,OAAO,CAA0B,KAAa,EAAE,MAAc;QACnE,OAAO,IAAK,IAAI,CAAC,WAAmB,CAClC,IAAI,CAAC,iBAAiB,EACtB,EAAE,KAAK,EAAE,MAAM,EAAE,CAClB,CAAA;IACH,CAAC;IAEM,OAAO,CAA0B,CAAS,EAAE,CAAS;QAC1D,OAAO,IAAK,IAAI,CAAC,WAAmB,CAClC,IAAI,CAAC,iBAAiB,EACtB,IAAI,CAAC,QAAQ,EACb,IAAI,KAAK,CAAC,CAAC,EAAE,CAAC,CAAC,CAChB,CAAA;IACH,CAAC;IAEM,YAAY,CAA0B,EAAS;QACpD,OAAO,IAAI,CAAC,OAAO,CAAC,EAAE,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAC,CAAA;IACjC,CAAC;IAED;;;;;;;;;;OAUG;IACI,KAAK,CACV,SAAuD,EACvD,UAAkE,EAAG;QAErE,IAAI,SAAS,EAAE;YACb,MAAM,GAAG,GAAG,SAAS,YAAY,aAAa;gBAC5C,CAAC,CAAC,SAAS,CAAC,GAAG,CAAC,KAAK,EAAE;gBACvB,CAAC,CAAC,IAAI,GAAG,CAAC,SAAS,CAAC,CAAA;YAEtB,OAAO,IAAI,CAAC,OAAO,CAAC,GAAG,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;SACvD;QAED,MAAM,EAAE,gBAAgB,EAAE,aAAa,EAAE,GAAG,MAAM,CAAC,MAAM,CAAC,EAAE,EAAE,EAAE,gBAAgB,EAAE,KAAK,EAAE,aAAa,EAAE,GAAG,EAAE,EAAE,OAAO,CAAC,CAAA;QAEvH,IAAI,gBAAgB,EAAE;YACpB,OAAO,IAAI,CAAC,SAAS,EAAE,CAAA;SACxB;QAED,OAAO,IAAI,CAAC,YAAY,CAAC,aAAa,CAAC,CAAA;IACzC,CAAC;IAEO,SAAS;QAEf,MAAM,OAAO,GAAG,IAAI,CAAC,wBAAwB,EAAE,CAAA;QAE/C,MAAM,CAAC,aAAa,EAAE,cAAc,EAAE,WAAW,CAAC,GAAG,OAAO,CAAA;QAC5D,MAAM,WAAW,GAAG,CAAC,EAAS,EAAE,EAAE,CAAC,WAAW,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,SAAS,EAAE,CAAA;QAClE,MAAM,cAAc,GAAG,CAAC,WAAW,CAAC,aAAa,CAAC,GAAG,WAAW,CAAC,cAAc,CAAC,CAAC,GAAG,CAAC,CAAA;QAErF,MAAM,IAAI,GAAG,IAAI,CAAC,KAAK,CAAC,cAAc,GAAG,QAAQ,CAAC,CAAA;QAElD,MAAM,QAAQ,GAAG,cAAc,CAAC,OAAO,CAAC,CAAA;QACxC,qDAAqD;QACrD,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QAC7D,MAAM,CAAC,GAAG,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,EAAE,QAAQ,CAAC,CAAC,GAAG,CAAC,IAAI,GAAG,IAAI,CAAC,CAAC,CAAC,CAAA;QAE7D,OAAO,IAAI,IAAI,CAAC,CAAC,EAAE,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,UAAU,GAAG,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,EAAE,IAAI,CAAC,WAAW,GAAG,CAAC,CAAC,CAAC,CAAA;IAClG,CAAC;IAEO,YAAY,CAAC,OAAe;QAClC,MAAM,GAAG,GAAG,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,CAAA;QACnC,OAAO,GAAG,CAAC,GAAG,CAAC,GAAG,CAAC,KAAK,GAAG,OAAO,EAAE,GAAG,CAAC,MAAM,GAAG,OAAO,CAAC,CAAA;IAC3D,CAAC;IAES,wBAAwB;QAChC,MAAM
,IAAI,KAAK,CAAC,wDAAwD,CAAC,CAAA;IAC3E,CAAC;CACF"}

View File

@ -3,3 +3,4 @@ import { Point } from './Point';
export declare class FaceLandmarks5 extends FaceLandmarks {
protected getRefPointsForAlignment(): Point[];
}
//# sourceMappingURL=FaceLandmarks5.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks5.d.ts","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks5.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAGhC,qBAAa,cAAe,SAAQ,aAAa;IAE/C,SAAS,CAAC,wBAAwB,IAAI,KAAK,EAAE;CAQ9C"}

View File

@ -0,0 +1,13 @@
import { getCenterPoint } from '../utils';
import { FaceLandmarks } from './FaceLandmarks';
export class FaceLandmarks5 extends FaceLandmarks {
getRefPointsForAlignment() {
const pts = this.positions;
return [
pts[0],
pts[1],
getCenterPoint([pts[3], pts[4]])
];
}
}
//# sourceMappingURL=FaceLandmarks5.js.map
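
FaceLandmarks5 supplies the three reference points that the dlib-style alignment needs from the 5-point layout: the points at indices 0 and 1 plus the midpoint of the points at indices 3 and 4 (conventionally the two eyes and the two mouth corners; that reading of the indices is an assumption, not stated in the diff). Sketch:

import { FaceLandmarks5 } from './FaceLandmarks5';
import { Point } from './Point';

// Placeholder relative positions; a real detector supplies these.
const pts5 = Array.from({ length: 5 }, () => new Point(Math.random(), Math.random()));
const landmarks5 = new FaceLandmarks5(pts5, { width: 640, height: 480 });
const aligned = landmarks5.align(null, { useDlibAlignment: true }); // consumes the 3 reference points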

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks5.js","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks5.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAIhD,MAAM,OAAO,cAAe,SAAQ,aAAa;IAErC,wBAAwB;QAChC,MAAM,GAAG,GAAG,IAAI,CAAC,SAAS,CAAA;QAC1B,OAAO;YACL,GAAG,CAAC,CAAC,CAAC;YACN,GAAG,CAAC,CAAC,CAAC;YACN,cAAc,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC;SACjC,CAAA;IACH,CAAC;CACF"}

View File

@ -10,3 +10,4 @@ export declare class FaceLandmarks68 extends FaceLandmarks {
getMouth(): Point[];
protected getRefPointsForAlignment(): Point[];
}
//# sourceMappingURL=FaceLandmarks68.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks68.d.ts","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks68.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAChD,OAAO,EAAE,KAAK,EAAE,MAAM,SAAS,CAAC;AAEhC,qBAAa,eAAgB,SAAQ,aAAa;IACzC,aAAa,IAAI,KAAK,EAAE;IAIxB,cAAc,IAAI,KAAK,EAAE;IAIzB,eAAe,IAAI,KAAK,EAAE;IAI1B,OAAO,IAAI,KAAK,EAAE;IAIlB,UAAU,IAAI,KAAK,EAAE;IAIrB,WAAW,IAAI,KAAK,EAAE;IAItB,QAAQ,IAAI,KAAK,EAAE;IAI1B,SAAS,CAAC,wBAAwB,IAAI,KAAK,EAAE;CAO9C"}

View File

@ -0,0 +1,33 @@
import { getCenterPoint } from '../utils';
import { FaceLandmarks } from './FaceLandmarks';
export class FaceLandmarks68 extends FaceLandmarks {
getJawOutline() {
return this.positions.slice(0, 17);
}
getLeftEyeBrow() {
return this.positions.slice(17, 22);
}
getRightEyeBrow() {
return this.positions.slice(22, 27);
}
getNose() {
return this.positions.slice(27, 36);
}
getLeftEye() {
return this.positions.slice(36, 42);
}
getRightEye() {
return this.positions.slice(42, 48);
}
getMouth() {
return this.positions.slice(48, 68);
}
getRefPointsForAlignment() {
return [
this.getLeftEye(),
this.getRightEye(),
this.getMouth()
].map(getCenterPoint);
}
}
//# sourceMappingURL=FaceLandmarks68.js.map
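
The slice boundaries above encode the standard 68-point landmark layout: indices 0-16 jaw outline, 17-21 left eyebrow, 22-26 right eyebrow, 27-35 nose, 36-41 left eye, 42-47 right eye, 48-67 mouth. A small sketch combining the accessors with getCenterPoint, reusing the landmarks instance from the FaceLandmarks sketch above:

import { getCenterPoint } from '../utils';

const leftEyeCenter = getCenterPoint(landmarks.getLeftEye());
const rightEyeCenter = getCenterPoint(landmarks.getRightEye());
const eyeDistance = rightEyeCenter.sub(leftEyeCenter).magnitude(); // interocular distance in pixels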

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceLandmarks68.js","sourceRoot":"","sources":["../../../src/classes/FaceLandmarks68.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,cAAc,EAAE,MAAM,UAAU,CAAC;AAC1C,OAAO,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAC;AAGhD,MAAM,OAAO,eAAgB,SAAQ,aAAa;IACzC,aAAa;QAClB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,CAAC,EAAE,EAAE,CAAC,CAAA;IACpC,CAAC;IAEM,cAAc;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAEM,eAAe;QACpB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAEM,OAAO;QACZ,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAEM,UAAU;QACf,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAEM,WAAW;QAChB,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAEM,QAAQ;QACb,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAC,EAAE,EAAE,EAAE,CAAC,CAAA;IACrC,CAAC;IAES,wBAAwB;QAChC,OAAO;YACL,IAAI,CAAC,UAAU,EAAE;YACjB,IAAI,CAAC,WAAW,EAAE;YAClB,IAAI,CAAC,QAAQ,EAAE;SAChB,CAAC,GAAG,CAAC,cAAc,CAAC,CAAA;IACvB,CAAC;CACF"}

View File

@ -10,3 +10,4 @@ export declare class FaceMatch implements IFaceMatch {
get distance(): number;
toString(withDistance?: boolean): string;
}
//# sourceMappingURL=FaceMatch.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceMatch.d.ts","sourceRoot":"","sources":["../../../src/classes/FaceMatch.ts"],"names":[],"mappings":"AAEA,MAAM,WAAW,UAAU;IACzB,KAAK,EAAE,MAAM,CAAA;IACb,QAAQ,EAAE,MAAM,CAAA;CACjB;AAED,qBAAa,SAAU,YAAW,UAAU;IAC1C,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,SAAS,CAAQ;gBAEb,KAAK,EAAE,MAAM,EAAE,QAAQ,EAAE,MAAM;IAK3C,IAAW,KAAK,IAAI,MAAM,CAAuB;IACjD,IAAW,QAAQ,IAAI,MAAM,CAA0B;IAEhD,QAAQ,CAAC,YAAY,GAAE,OAAc,GAAG,MAAM;CAGtD"}

View File

@ -0,0 +1,13 @@
import { round } from '../utils';
export class FaceMatch {
constructor(label, distance) {
this._label = label;
this._distance = distance;
}
get label() { return this._label; }
get distance() { return this._distance; }
toString(withDistance = true) {
return `${this.label}${withDistance ? ` (${round(this.distance)})` : ''}`;
}
}
//# sourceMappingURL=FaceMatch.js.map
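
FaceMatch is a plain label/distance pair; toString appends the distance (rounded via the utils round helper) unless asked not to. Sketch (the rounded output assumes that helper's default precision of two decimals):

import { FaceMatch } from './FaceMatch';

const match = new FaceMatch('person 1', 0.4217);
match.toString();      // e.g. "person 1 (0.42)"
match.toString(false); // "person 1"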

View File

@ -0,0 +1 @@
{"version":3,"file":"FaceMatch.js","sourceRoot":"","sources":["../../../src/classes/FaceMatch.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,MAAM,UAAU,CAAC;AAOjC,MAAM,OAAO,SAAS;IAIpB,YAAY,KAAa,EAAE,QAAgB;QACzC,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,SAAS,GAAG,QAAQ,CAAA;IAC3B,CAAC;IAED,IAAW,KAAK,KAAa,OAAO,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;IACjD,IAAW,QAAQ,KAAa,OAAO,IAAI,CAAC,SAAS,CAAA,CAAC,CAAC;IAEhD,QAAQ,CAAC,eAAwB,IAAI;QAC1C,OAAO,GAAG,IAAI,CAAC,KAAK,GAAG,YAAY,CAAC,CAAC,CAAC,KAAK,KAAK,CAAC,IAAI,CAAC,QAAQ,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAA;IAC3E,CAAC;CACF"}

View File

@ -1,9 +1,10 @@
import { IBoundingBox } from './BoundingBox';
import { Box } from './Box';
import { IRect } from './Rect';
export declare class LabeledBox extends Box {
export declare class LabeledBox extends Box<LabeledBox> {
static assertIsValidLabeledBox(box: any, callee: string): void;
private _label;
constructor(box: IBoundingBox | IRect | any, label: number);
get label(): number;
}
//# sourceMappingURL=LabeledBox.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"LabeledBox.d.ts","sourceRoot":"","sources":["../../../src/classes/LabeledBox.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,YAAY,EAAE,MAAM,eAAe,CAAC;AAC7C,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,KAAK,EAAE,MAAM,QAAQ,CAAC;AAE/B,qBAAa,UAAW,SAAQ,GAAG,CAAC,UAAU,CAAC;WAE/B,uBAAuB,CAAC,GAAG,EAAE,GAAG,EAAE,MAAM,EAAE,MAAM;IAQ9D,OAAO,CAAC,MAAM,CAAQ;gBAEV,GAAG,EAAE,YAAY,GAAG,KAAK,GAAG,GAAG,EAAE,KAAK,EAAE,MAAM;IAK1D,IAAW,KAAK,IAAI,MAAM,CAAuB;CAElD"}

View File

@ -0,0 +1,16 @@
import { isValidNumber } from '../utils';
import { Box } from './Box';
export class LabeledBox extends Box {
constructor(box, label) {
super(box);
this._label = label;
}
static assertIsValidLabeledBox(box, callee) {
Box.assertIsValidBox(box, callee);
if (!isValidNumber(box.label)) {
throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
}
}
get label() { return this._label; }
}
//# sourceMappingURL=LabeledBox.js.map
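
LabeledBox attaches a numeric label to a Box, and its static validator checks untyped input before use. Sketch:

import { LabeledBox } from './LabeledBox';

const box = new LabeledBox({ x: 0, y: 0, width: 10, height: 10 }, 3);
box.label; // 3

// Throws unless the argument is a valid rect carrying a numeric label property.
LabeledBox.assertIsValidLabeledBox({ x: 0, y: 0, width: 10, height: 10, label: 3 }, 'example');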

View File

@ -0,0 +1 @@
{"version":3,"file":"LabeledBox.js","sourceRoot":"","sources":["../../../src/classes/LabeledBox.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,aAAa,EAAE,MAAM,UAAU,CAAC;AAEzC,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAG5B,MAAM,OAAO,UAAW,SAAQ,GAAe;IAY7C,YAAY,GAA+B,EAAE,KAAa;QACxD,KAAK,CAAC,GAAG,CAAC,CAAA;QACV,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;IACrB,CAAC;IAbM,MAAM,CAAC,uBAAuB,CAAC,GAAQ,EAAE,MAAc;QAC5D,GAAG,CAAC,gBAAgB,CAAC,GAAG,EAAE,MAAM,CAAC,CAAA;QAEjC,IAAI,CAAC,aAAa,CAAC,GAAG,CAAC,KAAK,CAAC,EAAE;YAC7B,MAAM,IAAI,KAAK,CAAC,GAAG,MAAM,+BAA+B,GAAG,CAAC,KAAK,kBAAkB,CAAC,CAAA;SACrF;IACH,CAAC;IASD,IAAW,KAAK,KAAa,OAAO,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;CAElD"}

View File

@ -7,3 +7,4 @@ export declare class LabeledFaceDescriptors {
toJSON(): any;
static fromJSON(json: any): LabeledFaceDescriptors;
}
//# sourceMappingURL=LabeledFaceDescriptors.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"LabeledFaceDescriptors.d.ts","sourceRoot":"","sources":["../../../src/classes/LabeledFaceDescriptors.ts"],"names":[],"mappings":"AAAA,qBAAa,sBAAsB;IACjC,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,YAAY,CAAgB;gBAExB,KAAK,EAAE,MAAM,EAAE,WAAW,EAAE,YAAY,EAAE;IAatD,IAAW,KAAK,IAAI,MAAM,CAAuB;IACjD,IAAW,WAAW,IAAI,YAAY,EAAE,CAA6B;IAE9D,MAAM,IAAI,GAAG;WAON,QAAQ,CAAC,IAAI,EAAE,GAAG,GAAG,sBAAsB;CAO1D"}

View File

@ -0,0 +1,27 @@
export class LabeledFaceDescriptors {
constructor(label, descriptors) {
if (!(typeof label === 'string')) {
throw new Error('LabeledFaceDescriptors - constructor expected label to be a string');
}
if (!Array.isArray(descriptors) || descriptors.some(desc => !(desc instanceof Float32Array))) {
throw new Error('LabeledFaceDescriptors - constructor expected descriptors to be an array of Float32Array');
}
this._label = label;
this._descriptors = descriptors;
}
get label() { return this._label; }
get descriptors() { return this._descriptors; }
toJSON() {
return {
label: this.label,
descriptors: this.descriptors.map((d) => Array.from(d))
};
}
static fromJSON(json) {
const descriptors = json.descriptors.map((d) => {
return new Float32Array(d);
});
return new LabeledFaceDescriptors(json.label, descriptors);
}
}
//# sourceMappingURL=LabeledFaceDescriptors.js.map
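
toJSON converts each Float32Array descriptor into a plain array, so instances survive JSON.stringify and can be rebuilt with fromJSON. A round-trip sketch (the 128-element descriptor length is illustrative, not mandated by this class):

import { LabeledFaceDescriptors } from './LabeledFaceDescriptors';

const original = new LabeledFaceDescriptors('alice', [new Float32Array(128)]);
const serialized = JSON.stringify(original.toJSON());
const restored = LabeledFaceDescriptors.fromJSON(JSON.parse(serialized));
restored.descriptors[0] instanceof Float32Array; // true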

View File

@ -0,0 +1 @@
{"version":3,"file":"LabeledFaceDescriptors.js","sourceRoot":"","sources":["../../../src/classes/LabeledFaceDescriptors.ts"],"names":[],"mappings":"AAAA,MAAM,OAAO,sBAAsB;IAIjC,YAAY,KAAa,EAAE,WAA2B;QACpD,IAAI,CAAC,CAAC,OAAO,KAAK,KAAK,QAAQ,CAAC,EAAE;YAChC,MAAM,IAAI,KAAK,CAAC,oEAAoE,CAAC,CAAA;SACtF;QAED,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,WAAW,CAAC,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,IAAI,YAAY,YAAY,CAAC,CAAC,EAAE;YAC5F,MAAM,IAAI,KAAK,CAAC,0FAA0F,CAAC,CAAA;SAC5G;QAED,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,YAAY,GAAG,WAAW,CAAA;IACjC,CAAC;IAED,IAAW,KAAK,KAAa,OAAO,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;IACjD,IAAW,WAAW,KAAqB,OAAO,IAAI,CAAC,YAAY,CAAA,CAAC,CAAC;IAE9D,MAAM;QACX,OAAO;YACL,KAAK,EAAE,IAAI,CAAC,KAAK;YACjB,WAAW,EAAE,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,EAAE,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC;SACxD,CAAC;IACJ,CAAC;IAEM,MAAM,CAAC,QAAQ,CAAC,IAAS;QAC9B,MAAM,WAAW,GAAG,IAAI,CAAC,WAAW,CAAC,GAAG,CAAC,CAAC,CAAM,EAAE,EAAE;YAClD,OAAO,IAAI,YAAY,CAAC,CAAC,CAAC,CAAC;QAC7B,CAAC,CAAC,CAAC;QACH,OAAO,IAAI,sBAAsB,CAAC,IAAI,CAAC,KAAK,EAAE,WAAW,CAAC,CAAC;IAC7D,CAAC;CAEF"}

View File

@ -18,3 +18,4 @@ export declare class ObjectDetection {
get relativeBox(): Box;
forSize(width: number, height: number): ObjectDetection;
}
//# sourceMappingURL=ObjectDetection.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"ObjectDetection.d.ts","sourceRoot":"","sources":["../../../src/classes/ObjectDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,UAAU,EAAE,WAAW,EAAE,MAAM,cAAc,CAAC;AACvD,OAAO,EAAE,KAAK,EAAQ,MAAM,QAAQ,CAAC;AAErC,qBAAa,eAAe;IAC1B,OAAO,CAAC,MAAM,CAAQ;IACtB,OAAO,CAAC,WAAW,CAAQ;IAC3B,OAAO,CAAC,UAAU,CAAQ;IAC1B,OAAO,CAAC,IAAI,CAAM;IAClB,OAAO,CAAC,UAAU,CAAY;gBAG5B,KAAK,EAAE,MAAM,EACb,UAAU,EAAE,MAAM,EAClB,SAAS,EAAE,MAAM,EACjB,WAAW,EAAE,KAAK,EAClB,SAAS,EAAE,WAAW;IASxB,IAAW,KAAK,IAAI,MAAM,CAAuB;IACjD,IAAW,UAAU,IAAI,MAAM,CAA4B;IAC3D,IAAW,SAAS,IAAI,MAAM,CAA2B;IACzD,IAAW,GAAG,IAAI,GAAG,CAAqB;IAC1C,IAAW,SAAS,IAAI,UAAU,CAA2B;IAC7D,IAAW,UAAU,IAAI,MAAM,CAAgC;IAC/D,IAAW,WAAW,IAAI,MAAM,CAAiC;IACjE,IAAW,WAAW,IAAI,GAAG,CAAgE;IAEtF,OAAO,CAAC,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,MAAM,GAAG,eAAe;CAS/D"}

View File

@ -0,0 +1,23 @@
import { Box } from './Box';
import { Dimensions } from './Dimensions';
export class ObjectDetection {
constructor(score, classScore, className, relativeBox, imageDims) {
this._imageDims = new Dimensions(imageDims.width, imageDims.height);
this._score = score;
this._classScore = classScore;
this._className = className;
this._box = new Box(relativeBox).rescale(this._imageDims);
}
get score() { return this._score; }
get classScore() { return this._classScore; }
get className() { return this._className; }
get box() { return this._box; }
get imageDims() { return this._imageDims; }
get imageWidth() { return this.imageDims.width; }
get imageHeight() { return this.imageDims.height; }
get relativeBox() { return new Box(this._box).rescale(this.imageDims.reverse()); }
forSize(width, height) {
return new ObjectDetection(this.score, this.classScore, this.className, this.relativeBox, { width, height });
}
}
//# sourceMappingURL=ObjectDetection.js.map
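
ObjectDetection keeps the box in absolute pixels alongside the source dimensions, so relativeBox can always be recovered by rescaling with the reciprocal dimensions, and forSize round-trips through that relative form. Sketch (values invented):

import { Box } from './Box';
import { ObjectDetection } from './ObjectDetection';

const det = new ObjectDetection(0.9, 0.8, 'face', new Box({ x: 0.1, y: 0.1, width: 0.5, height: 0.5 }), { width: 100, height: 100 });
det.box.width;                   // 50: the relative input box scaled up by the image dims
det.relativeBox.width;           // 0.5: recovered via imageDims.reverse()
det.forSize(200, 200).box.width; // 100: the same relative box on a 200x200 image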

View File

@ -0,0 +1 @@
{"version":3,"file":"ObjectDetection.js","sourceRoot":"","sources":["../../../src/classes/ObjectDetection.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,GAAG,EAAE,MAAM,OAAO,CAAC;AAC5B,OAAO,EAAE,UAAU,EAAe,MAAM,cAAc,CAAC;AAGvD,MAAM,OAAO,eAAe;IAO1B,YACE,KAAa,EACb,UAAkB,EAClB,SAAiB,EACjB,WAAkB,EAClB,SAAsB;QAEtB,IAAI,CAAC,UAAU,GAAG,IAAI,UAAU,CAAC,SAAS,CAAC,KAAK,EAAE,SAAS,CAAC,MAAM,CAAC,CAAA;QACnE,IAAI,CAAC,MAAM,GAAG,KAAK,CAAA;QACnB,IAAI,CAAC,WAAW,GAAG,UAAU,CAAA;QAC7B,IAAI,CAAC,UAAU,GAAG,SAAS,CAAA;QAC3B,IAAI,CAAC,IAAI,GAAG,IAAI,GAAG,CAAC,WAAW,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,UAAU,CAAC,CAAA;IAC3D,CAAC;IAED,IAAW,KAAK,KAAa,OAAO,IAAI,CAAC,MAAM,CAAA,CAAC,CAAC;IACjD,IAAW,UAAU,KAAa,OAAO,IAAI,CAAC,WAAW,CAAA,CAAC,CAAC;IAC3D,IAAW,SAAS,KAAa,OAAO,IAAI,CAAC,UAAU,CAAA,CAAC,CAAC;IACzD,IAAW,GAAG,KAAU,OAAO,IAAI,CAAC,IAAI,CAAA,CAAC,CAAC;IAC1C,IAAW,SAAS,KAAiB,OAAO,IAAI,CAAC,UAAU,CAAA,CAAC,CAAC;IAC7D,IAAW,UAAU,KAAa,OAAO,IAAI,CAAC,SAAS,CAAC,KAAK,CAAA,CAAC,CAAC;IAC/D,IAAW,WAAW,KAAa,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAA,CAAC,CAAC;IACjE,IAAW,WAAW,KAAU,OAAO,IAAI,GAAG,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC,SAAS,CAAC,OAAO,EAAE,CAAC,CAAA,CAAC,CAAC;IAEtF,OAAO,CAAC,KAAa,EAAE,MAAc;QAC1C,OAAO,IAAI,eAAe,CACxB,IAAI,CAAC,KAAK,EACV,IAAI,CAAC,UAAU,EACf,IAAI,CAAC,SAAS,EACd,IAAI,CAAC,WAAW,EAChB,EAAE,KAAK,EAAE,MAAM,EAAC,CACjB,CAAA;IACH,CAAC;CACF"}

View File

@ -16,3 +16,4 @@ export declare class Point implements IPoint {
magnitude(): number;
floor(): Point;
}
//# sourceMappingURL=Point.d.ts.map

View File

@ -0,0 +1 @@
{"version":3,"file":"Point.d.ts","sourceRoot":"","sources":["../../../src/classes/Point.ts"],"names":[],"mappings":"AAAA,MAAM,WAAW,MAAM;IACrB,CAAC,EAAE,MAAM,CAAA;IACT,CAAC,EAAE,MAAM,CAAA;CACV;AAED,qBAAa,KAAM,YAAW,MAAM;IAClC,OAAO,CAAC,EAAE,CAAQ;IAClB,OAAO,CAAC,EAAE,CAAQ;gBAEN,CAAC,EAAE,MAAM,EAAE,CAAC,EAAE,MAAM;IAKhC,IAAI,CAAC,IAAI,MAAM,CAAmB;IAClC,IAAI,CAAC,IAAI,MAAM,CAAmB;IAE3B,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK;IAItB,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK;IAItB,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK;IAItB,GAAG,CAAC,EAAE,EAAE,MAAM,GAAG,KAAK;IAItB,GAAG,IAAI,KAAK;IAIZ,SAAS,IAAI,MAAM;IAInB,KAAK,IAAI,KAAK;CAGtB"}

View File

@ -0,0 +1,30 @@
export class Point {
constructor(x, y) {
this._x = x;
this._y = y;
}
get x() { return this._x; }
get y() { return this._y; }
add(pt) {
return new Point(this.x + pt.x, this.y + pt.y);
}
sub(pt) {
return new Point(this.x - pt.x, this.y - pt.y);
}
mul(pt) {
return new Point(this.x * pt.x, this.y * pt.y);
}
div(pt) {
return new Point(this.x / pt.x, this.y / pt.y);
}
abs() {
return new Point(Math.abs(this.x), Math.abs(this.y));
}
magnitude() {
return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2));
}
floor() {
return new Point(Math.floor(this.x), Math.floor(this.y));
}
}
//# sourceMappingURL=Point.js.map
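
Point is an immutable 2D value type: every operation returns a new instance. Sketch:

import { Point } from './Point';

const a = new Point(3, 4);
a.magnitude();                    // 5
const b = a.add(new Point(1, 1)); // Point(4, 5); a itself is unchanged
b.sub(a).abs().floor();           // Point(1, 1)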

View File

@ -0,0 +1 @@
{"version":3,"file":"Point.js","sourceRoot":"","sources":["../../../src/classes/Point.ts"],"names":[],"mappings":"AAKA,MAAM,OAAO,KAAK;IAIhB,YAAY,CAAS,EAAE,CAAS;QAC9B,IAAI,CAAC,EAAE,GAAG,CAAC,CAAA;QACX,IAAI,CAAC,EAAE,GAAG,CAAC,CAAA;IACb,CAAC;IAED,IAAI,CAAC,KAAa,OAAO,IAAI,CAAC,EAAE,CAAA,CAAC,CAAC;IAClC,IAAI,CAAC,KAAa,OAAO,IAAI,CAAC,EAAE,CAAA,CAAC,CAAC;IAE3B,GAAG,CAAC,EAAU;QACnB,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;IAChD,CAAC;IAEM,GAAG,CAAC,EAAU;QACnB,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;IAChD,CAAC;IAEM,GAAG,CAAC,EAAU;QACnB,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;IAChD,CAAC;IAEM,GAAG,CAAC,EAAU;QACnB,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;IAChD,CAAC;IAEM,GAAG;QACR,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;IACtD,CAAC;IAEM,SAAS;QACd,OAAO,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,GAAG,IAAI,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,CAAA;IAC7D,CAAC;IAEM,KAAK;QACV,OAAO,IAAI,KAAK,CAAC,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,IAAI,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,CAAA;IAC1D,CAAC;CACF"}

Some files were not shown because too many files have changed in this diff.