var __defineProperty = Object.defineProperty;
var __hasOwnProperty = Object.prototype.hasOwnProperty;
var __commonJS = (callback, module) => () => {
  if (!module) {
    module = {exports: {}};
    callback(module.exports, module);
  }
  return module.exports;
};
var __markAsModule = (target) => {
  return __defineProperty(target, "__esModule", {value: true});
};
var __export = (target, all) => {
  __markAsModule(target);
  for (var name in all)
    __defineProperty(target, name, {get: all[name], enumerable: true});
};
var __exportStar = (target, module) => {
  __markAsModule(target);
  if (typeof module === "object" || typeof module === "function") {
    for (let key in module)
      if (!__hasOwnProperty.call(target, key) && key !== "default")
        __defineProperty(target, key, {get: () => module[key], enumerable: true});
  }
  return target;
};
var __toModule = (module) => {
  if (module && module.__esModule)
    return module;
  return __exportStar(__defineProperty({}, "default", {value: module, enumerable: true}), module);
};

// build/src/env/isNodejs.js
var require_isNodejs = __commonJS((exports, module) => {
  __export(exports, {
    isNodejs: () => isNodejs3
  });
  function isNodejs3() {
    return typeof global === "object" && true && typeof module !== "undefined" && typeof process !== "undefined" && !!process.version;
  }
});
// build/src/draw/drawContour.js
function drawContour(ctx, points, isClosed = false) {
  ctx.beginPath();
  points.slice(1).forEach(({x, y}, prevIdx) => {
    const from = points[prevIdx];
    ctx.moveTo(from.x, from.y);
    ctx.lineTo(x, y);
  });
  if (isClosed) {
    const from = points[points.length - 1];
    const to = points[0];
    if (!from || !to) {
      return;
    }
    ctx.moveTo(from.x, from.y);
    ctx.lineTo(to.x, to.y);
  }
  ctx.stroke();
}
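// Usage sketch (illustrative): drawContour strokes consecutive line segments between
// the given points, closing the path back to the first point when isClosed is true, e.g.:
//   const ctx = getContext2dOrThrow(canvas);
//   drawContour(ctx, [new Point(10, 10), new Point(50, 10), new Point(30, 40)], true);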
// build/src/classes/Dimensions.js
class Dimensions {
  constructor(width, height) {
    if (!isValidNumber(width) || !isValidNumber(height)) {
      throw new Error(`Dimensions.constructor - expected width and height to be valid numbers, instead have ${JSON.stringify({width, height})}`);
    }
    this._width = width;
    this._height = height;
  }
  get width() {
    return this._width;
  }
  get height() {
    return this._height;
  }
  reverse() {
    return new Dimensions(1 / this.width, 1 / this.height);
  }
}
// build/src/utils/index.js
const utils_exports = {};
__export(utils_exports, {
  computeReshapedDimensions: () => computeReshapedDimensions,
  getCenterPoint: () => getCenterPoint,
  isDimensions: () => isDimensions,
  isEven: () => isEven,
  isFloat: () => isFloat,
  isTensor: () => isTensor,
  isTensor1D: () => isTensor1D,
  isTensor2D: () => isTensor2D,
  isTensor3D: () => isTensor3D,
  isTensor4D: () => isTensor4D,
  isValidNumber: () => isValidNumber,
  isValidProbablitiy: () => isValidProbablitiy,
  range: () => range,
  round: () => round
});
import {
  Tensor
} from "@tensorflow/tfjs";
function isTensor(tensor2, dim) {
  return tensor2 instanceof Tensor && tensor2.shape.length === dim;
}
function isTensor1D(tensor2) {
  return isTensor(tensor2, 1);
}
function isTensor2D(tensor2) {
  return isTensor(tensor2, 2);
}
function isTensor3D(tensor2) {
  return isTensor(tensor2, 3);
}
function isTensor4D(tensor2) {
  return isTensor(tensor2, 4);
}
function isFloat(num) {
  return num % 1 !== 0;
}
function isEven(num) {
  return num % 2 === 0;
}
function round(num, prec = 2) {
  const f = Math.pow(10, prec);
  return Math.floor(num * f) / f;
}
function isDimensions(obj) {
  return obj && obj.width && obj.height;
}
function computeReshapedDimensions({width, height}, inputSize) {
  const scale2 = inputSize / Math.max(height, width);
  return new Dimensions(Math.round(width * scale2), Math.round(height * scale2));
}
function getCenterPoint(pts) {
  return pts.reduce((sum, pt) => sum.add(pt), new Point(0, 0)).div(new Point(pts.length, pts.length));
}
function range(num, start, step) {
  return Array(num).fill(0).map((_, i) => start + i * step);
}
function isValidNumber(num) {
  return !!num && num !== Infinity && num !== -Infinity && !isNaN(num) || num === 0;
}
function isValidProbablitiy(num) {
  return isValidNumber(num) && 0 <= num && num <= 1;
}
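// Illustrative examples of the helpers above (values are assumptions for illustration):
//   round(0.8756)           // 0.87 (floors at 2 decimals by default)
//   range(3, 10, 5)         // [10, 15, 20]
//   isValidProbablitiy(0.5) // true
//   computeReshapedDimensions({width: 1280, height: 720}, 416) // Dimensions 416 x 234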
// build/src/classes/Point.js
class Point {
  constructor(x, y) {
    this._x = x;
    this._y = y;
  }
  get x() {
    return this._x;
  }
  get y() {
    return this._y;
  }
  add(pt) {
    return new Point(this.x + pt.x, this.y + pt.y);
  }
  sub(pt) {
    return new Point(this.x - pt.x, this.y - pt.y);
  }
  mul(pt) {
    return new Point(this.x * pt.x, this.y * pt.y);
  }
  div(pt) {
    return new Point(this.x / pt.x, this.y / pt.y);
  }
  abs() {
    return new Point(Math.abs(this.x), Math.abs(this.y));
  }
  magnitude() {
    return Math.sqrt(Math.pow(this.x, 2) + Math.pow(this.y, 2));
  }
  floor() {
    return new Point(Math.floor(this.x), Math.floor(this.y));
  }
}
// build/src/classes/Box.js
class Box {
  constructor(_box, allowNegativeDimensions = true) {
    const box = _box || {};
    const isBbox = [box.left, box.top, box.right, box.bottom].every(isValidNumber);
    const isRect = [box.x, box.y, box.width, box.height].every(isValidNumber);
    if (!isRect && !isBbox) {
      throw new Error(`Box.constructor - expected box to be IBoundingBox | IRect, instead have ${JSON.stringify(box)}`);
    }
    const [x, y, width, height] = isRect ? [box.x, box.y, box.width, box.height] : [box.left, box.top, box.right - box.left, box.bottom - box.top];
    Box.assertIsValidBox({x, y, width, height}, "Box.constructor", allowNegativeDimensions);
    this._x = x;
    this._y = y;
    this._width = width;
    this._height = height;
  }
  static isRect(rect) {
    return !!rect && [rect.x, rect.y, rect.width, rect.height].every(isValidNumber);
  }
  static assertIsValidBox(box, callee, allowNegativeDimensions = false) {
    if (!Box.isRect(box)) {
      throw new Error(`${callee} - invalid box: ${JSON.stringify(box)}, expected object with properties x, y, width, height`);
    }
    if (!allowNegativeDimensions && (box.width < 0 || box.height < 0)) {
      throw new Error(`${callee} - width (${box.width}) and height (${box.height}) must be positive numbers`);
    }
  }
  get x() {
    return this._x;
  }
  get y() {
    return this._y;
  }
  get width() {
    return this._width;
  }
  get height() {
    return this._height;
  }
  get left() {
    return this.x;
  }
  get top() {
    return this.y;
  }
  get right() {
    return this.x + this.width;
  }
  get bottom() {
    return this.y + this.height;
  }
  get area() {
    return this.width * this.height;
  }
  get topLeft() {
    return new Point(this.left, this.top);
  }
  get topRight() {
    return new Point(this.right, this.top);
  }
  get bottomLeft() {
    return new Point(this.left, this.bottom);
  }
  get bottomRight() {
    return new Point(this.right, this.bottom);
  }
  round() {
    const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.round(val));
    return new Box({x, y, width, height});
  }
  floor() {
    const [x, y, width, height] = [this.x, this.y, this.width, this.height].map((val) => Math.floor(val));
    return new Box({x, y, width, height});
  }
  toSquare() {
    let {x, y, width, height} = this;
    const diff = Math.abs(width - height);
    if (width < height) {
      x -= diff / 2;
      width += diff;
    }
    if (height < width) {
      y -= diff / 2;
      height += diff;
    }
    return new Box({x, y, width, height});
  }
  rescale(s) {
    const scaleX = isDimensions(s) ? s.width : s;
    const scaleY = isDimensions(s) ? s.height : s;
    return new Box({
      x: this.x * scaleX,
      y: this.y * scaleY,
      width: this.width * scaleX,
      height: this.height * scaleY
    });
  }
  pad(padX, padY) {
    let [x, y, width, height] = [
      this.x - padX / 2,
      this.y - padY / 2,
      this.width + padX,
      this.height + padY
    ];
    return new Box({x, y, width, height});
  }
  clipAtImageBorders(imgWidth, imgHeight) {
    const {x, y, right, bottom} = this;
    const clippedX = Math.max(x, 0);
    const clippedY = Math.max(y, 0);
    const newWidth = right - clippedX;
    const newHeight = bottom - clippedY;
    const clippedWidth = Math.min(newWidth, imgWidth - clippedX);
    const clippedHeight = Math.min(newHeight, imgHeight - clippedY);
    return new Box({x: clippedX, y: clippedY, width: clippedWidth, height: clippedHeight}).floor();
  }
  shift(sx, sy) {
    const {width, height} = this;
    const x = this.x + sx;
    const y = this.y + sy;
    return new Box({x, y, width, height});
  }
  padAtBorders(imageHeight, imageWidth) {
    const w = this.width + 1;
    const h = this.height + 1;
    let dx = 1;
    let dy = 1;
    let edx = w;
    let edy = h;
    let x = this.left;
    let y = this.top;
    let ex = this.right;
    let ey = this.bottom;
    if (ex > imageWidth) {
      edx = -ex + imageWidth + w;
      ex = imageWidth;
    }
    if (ey > imageHeight) {
      edy = -ey + imageHeight + h;
      ey = imageHeight;
    }
    if (x < 1) {
      edy = 2 - x;
      x = 1;
    }
    if (y < 1) {
      edy = 2 - y;
      y = 1;
    }
    return {dy, edy, dx, edx, y, ey, x, ex, w, h};
  }
  calibrate(region) {
    return new Box({
      left: this.left + region.left * this.width,
      top: this.top + region.top * this.height,
      right: this.right + region.right * this.width,
      bottom: this.bottom + region.bottom * this.height
    }).toSquare().round();
  }
}
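// Usage sketch (illustrative): Box accepts either an IRect ({x, y, width, height})
// or an IBoundingBox ({left, top, right, bottom}), e.g.:
//   const box = new Box({x: 10, y: 20, width: 100, height: 50});
//   box.toSquare().round();         // expands the shorter side to a 100 x 100 square
//   box.clipAtImageBorders(64, 64); // clipped to stay within a 64 x 64 image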
// build/src/classes/BoundingBox.js
class BoundingBox extends Box {
  constructor(left, top, right, bottom, allowNegativeDimensions = false) {
    super({left, top, right, bottom}, allowNegativeDimensions);
  }
}
// build/src/classes/ObjectDetection.js
class ObjectDetection {
  constructor(score, classScore, className, relativeBox, imageDims) {
    this._imageDims = new Dimensions(imageDims.width, imageDims.height);
    this._score = score;
    this._classScore = classScore;
    this._className = className;
    this._box = new Box(relativeBox).rescale(this._imageDims);
  }
  get score() {
    return this._score;
  }
  get classScore() {
    return this._classScore;
  }
  get className() {
    return this._className;
  }
  get box() {
    return this._box;
  }
  get imageDims() {
    return this._imageDims;
  }
  get imageWidth() {
    return this.imageDims.width;
  }
  get imageHeight() {
    return this.imageDims.height;
  }
  get relativeBox() {
    return new Box(this._box).rescale(this.imageDims.reverse());
  }
  forSize(width, height) {
    return new ObjectDetection(this.score, this.classScore, this.className, this.relativeBox, {width, height});
  }
}
// build/src/classes/FaceDetection.js
class FaceDetection extends ObjectDetection {
  constructor(score, relativeBox, imageDims) {
    super(score, score, "", relativeBox, imageDims);
  }
  forSize(width, height) {
    const {score, relativeBox, imageDims} = super.forSize(width, height);
    return new FaceDetection(score, relativeBox, imageDims);
  }
}
// build/src/ops/iou.js
function iou(box1, box2, isIOU = true) {
  const width = Math.max(0, Math.min(box1.right, box2.right) - Math.max(box1.left, box2.left));
  const height = Math.max(0, Math.min(box1.bottom, box2.bottom) - Math.max(box1.top, box2.top));
  const interSection = width * height;
  return isIOU ? interSection / (box1.area + box2.area - interSection) : interSection / Math.min(box1.area, box2.area);
}
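// Illustrative: iou returns 1 for identical boxes and 0 for disjoint ones; with
// isIOU = false the intersection is divided by the smaller box area instead, e.g.:
//   iou(new Rect(0, 0, 10, 10), new Rect(5, 0, 10, 10)); // 50 / (100 + 100 - 50) ≈ 0.33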
// build/src/ops/minBbox.js
function minBbox(pts) {
  const xs = pts.map((pt) => pt.x);
  const ys = pts.map((pt) => pt.y);
  const minX = xs.reduce((min, x) => x < min ? x : min, Infinity);
  const minY = ys.reduce((min, y) => y < min ? y : min, Infinity);
  const maxX = xs.reduce((max, x) => max < x ? x : max, 0);
  const maxY = ys.reduce((max, y) => max < y ? y : max, 0);
  return new BoundingBox(minX, minY, maxX, maxY);
}
// build/src/ops/nonMaxSuppression.js
function nonMaxSuppression(boxes, scores, iouThreshold, isIOU = true) {
  let indicesSortedByScore = scores.map((score, boxIndex) => ({score, boxIndex})).sort((c1, c2) => c1.score - c2.score).map((c) => c.boxIndex);
  const pick = [];
  while (indicesSortedByScore.length > 0) {
    const curr = indicesSortedByScore.pop();
    pick.push(curr);
    const indices = indicesSortedByScore;
    const outputs = [];
    for (let i = 0; i < indices.length; i++) {
      const idx = indices[i];
      const currBox = boxes[curr];
      const idxBox = boxes[idx];
      outputs.push(iou(currBox, idxBox, isIOU));
    }
    indicesSortedByScore = indicesSortedByScore.filter((_, j) => outputs[j] <= iouThreshold);
  }
  return pick;
}
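// Descriptive note: greedy non-maximum suppression - boxes are taken in descending
// score order, and any remaining box whose overlap with the picked box exceeds
// iouThreshold is dropped; the result is the list of kept box indices, e.g.:
//   const keep = nonMaxSuppression(boxes, scores, 0.5);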
// build/src/ops/normalize.js
import {
  concat,
  fill,
  sub,
  tidy
} from "@tensorflow/tfjs";
function normalize(x, meanRgb) {
  return tidy(() => {
    const [r, g, b] = meanRgb;
    const avg_r = fill([...x.shape.slice(0, 3), 1], r);
    const avg_g = fill([...x.shape.slice(0, 3), 1], g);
    const avg_b = fill([...x.shape.slice(0, 3), 1], b);
    const avg_rgb = concat([avg_r, avg_g, avg_b], 3);
    return sub(x, avg_rgb);
  });
}
// build/src/ops/padToSquare.js
import {
  cast,
  concat as concat2,
  fill as fill2,
  tidy as tidy2
} from "@tensorflow/tfjs";
function padToSquare(imgTensor, isCenterImage = false) {
  return tidy2(() => {
    const [height, width] = imgTensor.shape.slice(1);
    if (height === width) {
      return imgTensor;
    }
    const dimDiff = Math.abs(height - width);
    const paddingAmount = Math.round(dimDiff * (isCenterImage ? 0.5 : 1));
    const paddingAxis = height > width ? 2 : 1;
    const createPaddingTensor = (paddingAmount2) => {
      const paddingTensorShape = imgTensor.shape.slice();
      paddingTensorShape[paddingAxis] = paddingAmount2;
      return fill2(paddingTensorShape, 0);
    };
    const paddingTensorAppend = createPaddingTensor(paddingAmount);
    const remainingPaddingAmount = dimDiff - paddingTensorAppend.shape[paddingAxis];
    const paddingTensorPrepend = isCenterImage && remainingPaddingAmount ? createPaddingTensor(remainingPaddingAmount) : null;
    const tensorsToStack = [
      paddingTensorPrepend,
      imgTensor,
      paddingTensorAppend
    ].filter((t) => !!t).map((t) => cast(t, "float32"));
    return concat2(tensorsToStack, paddingAxis);
  });
}
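// Illustrative: padToSquare zero-pads the shorter side of a batched NHWC image tensor
// so that height === width; with isCenterImage = true the padding is split evenly
// before and after the image, e.g. a [1, 300, 400, 3] input becomes [1, 400, 400, 3]:
//   const squared = padToSquare(imgTensor, true);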
// build/src/ops/shuffleArray.js
function shuffleArray(inputArray) {
  const array = inputArray.slice();
  for (let i = array.length - 1; i > 0; i--) {
    const j = Math.floor(Math.random() * (i + 1));
    const x = array[i];
    array[i] = array[j];
    array[j] = x;
  }
  return array;
}
// build/src/ops/index.js
function sigmoid(x) {
  return 1 / (1 + Math.exp(-x));
}
function inverseSigmoid(x) {
  return Math.log(x / (1 - x));
}
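// Illustrative: sigmoid maps a logit into (0, 1) via 1 / (1 + e^-x), and inverseSigmoid
// is its inverse up to floating point error, e.g.:
//   sigmoid(0);          // 0.5
//   inverseSigmoid(0.5); // 0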
// build/src/classes/Rect.js
class Rect extends Box {
  constructor(x, y, width, height, allowNegativeDimensions = false) {
    super({x, y, width, height}, allowNegativeDimensions);
  }
}
// build/src/classes/FaceLandmarks.js
const relX = 0.5;
const relY = 0.43;
const relScale = 0.45;
class FaceLandmarks {
  constructor(relativeFaceLandmarkPositions, imgDims, shift = new Point(0, 0)) {
    const {width, height} = imgDims;
    this._imgDims = new Dimensions(width, height);
    this._shift = shift;
    this._positions = relativeFaceLandmarkPositions.map((pt) => pt.mul(new Point(width, height)).add(shift));
  }
  get shift() {
    return new Point(this._shift.x, this._shift.y);
  }
  get imageWidth() {
    return this._imgDims.width;
  }
  get imageHeight() {
    return this._imgDims.height;
  }
  get positions() {
    return this._positions;
  }
  get relativePositions() {
    return this._positions.map((pt) => pt.sub(this._shift).div(new Point(this.imageWidth, this.imageHeight)));
  }
  forSize(width, height) {
    return new this.constructor(this.relativePositions, {width, height});
  }
  shiftBy(x, y) {
    return new this.constructor(this.relativePositions, this._imgDims, new Point(x, y));
  }
  shiftByPoint(pt) {
    return this.shiftBy(pt.x, pt.y);
  }
  align(detection, options = {}) {
    if (detection) {
      const box = detection instanceof FaceDetection ? detection.box.floor() : new Box(detection);
      return this.shiftBy(box.x, box.y).align(null, options);
    }
    const {useDlibAlignment, minBoxPadding} = Object.assign({}, {useDlibAlignment: false, minBoxPadding: 0.2}, options);
    if (useDlibAlignment) {
      return this.alignDlib();
    }
    return this.alignMinBbox(minBoxPadding);
  }
  alignDlib() {
    const centers = this.getRefPointsForAlignment();
    const [leftEyeCenter, rightEyeCenter, mouthCenter] = centers;
    const distToMouth = (pt) => mouthCenter.sub(pt).magnitude();
    const eyeToMouthDist = (distToMouth(leftEyeCenter) + distToMouth(rightEyeCenter)) / 2;
    const size = Math.floor(eyeToMouthDist / relScale);
    const refPoint = getCenterPoint(centers);
    const x = Math.floor(Math.max(0, refPoint.x - relX * size));
    const y = Math.floor(Math.max(0, refPoint.y - relY * size));
    return new Rect(x, y, Math.min(size, this.imageWidth + x), Math.min(size, this.imageHeight + y));
  }
  alignMinBbox(padding) {
    const box = minBbox(this.positions);
    return box.pad(box.width * padding, box.height * padding);
  }
  getRefPointsForAlignment() {
    throw new Error("getRefPointsForAlignment not implemented by base class");
  }
}
// build/src/classes/FaceLandmarks5.js
class FaceLandmarks5 extends FaceLandmarks {
  getRefPointsForAlignment() {
    const pts = this.positions;
    return [
      pts[0],
      pts[1],
      getCenterPoint([pts[3], pts[4]])
    ];
  }
}
// build/src/classes/FaceLandmarks68.js
class FaceLandmarks68 extends FaceLandmarks {
  getJawOutline() {
    return this.positions.slice(0, 17);
  }
  getLeftEyeBrow() {
    return this.positions.slice(17, 22);
  }
  getRightEyeBrow() {
    return this.positions.slice(22, 27);
  }
  getNose() {
    return this.positions.slice(27, 36);
  }
  getLeftEye() {
    return this.positions.slice(36, 42);
  }
  getRightEye() {
    return this.positions.slice(42, 48);
  }
  getMouth() {
    return this.positions.slice(48, 68);
  }
  getRefPointsForAlignment() {
    return [
      this.getLeftEye(),
      this.getRightEye(),
      this.getMouth()
    ].map(getCenterPoint);
  }
}
// build/src/classes/FaceMatch.js
class FaceMatch {
  constructor(label, distance) {
    this._label = label;
    this._distance = distance;
  }
  get label() {
    return this._label;
  }
  get distance() {
    return this._distance;
  }
  toString(withDistance = true) {
    return `${this.label}${withDistance ? ` (${round(this.distance)})` : ""}`;
  }
}
// build/src/classes/LabeledBox.js
class LabeledBox extends Box {
  constructor(box, label) {
    super(box);
    this._label = label;
  }
  static assertIsValidLabeledBox(box, callee) {
    Box.assertIsValidBox(box, callee);
    if (!isValidNumber(box.label)) {
      throw new Error(`${callee} - expected property label (${box.label}) to be a number`);
    }
  }
  get label() {
    return this._label;
  }
}
// build/src/classes/LabeledFaceDescriptors.js
class LabeledFaceDescriptors {
  constructor(label, descriptors) {
    if (!(typeof label === "string")) {
      throw new Error("LabeledFaceDescriptors - constructor expected label to be a string");
    }
    if (!Array.isArray(descriptors) || descriptors.some((desc) => !(desc instanceof Float32Array))) {
      throw new Error("LabeledFaceDescriptors - constructor expected descriptors to be an array of Float32Array");
    }
    this._label = label;
    this._descriptors = descriptors;
  }
  get label() {
    return this._label;
  }
  get descriptors() {
    return this._descriptors;
  }
  toJSON() {
    return {
      label: this.label,
      descriptors: this.descriptors.map((d) => Array.from(d))
    };
  }
  static fromJSON(json) {
    const descriptors = json.descriptors.map((d) => {
      return new Float32Array(d);
    });
    return new LabeledFaceDescriptors(json.label, descriptors);
  }
}
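// Usage sketch (illustrative): toJSON/fromJSON allow persisting reference descriptors,
// e.g. as a plain JSON string:
//   const stored = JSON.stringify(labeled.toJSON());
//   const restored = LabeledFaceDescriptors.fromJSON(JSON.parse(stored));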
// build/src/classes/PredictedBox.js
class PredictedBox extends LabeledBox {
  constructor(box, label, score, classScore) {
    super(box, label);
    this._score = score;
    this._classScore = classScore;
  }
  static assertIsValidPredictedBox(box, callee) {
    LabeledBox.assertIsValidLabeledBox(box, callee);
    if (!isValidProbablitiy(box.score) || !isValidProbablitiy(box.classScore)) {
      throw new Error(`${callee} - expected properties score (${box.score}) and (${box.classScore}) to be a number between [0, 1]`);
    }
  }
  get score() {
    return this._score;
  }
  get classScore() {
    return this._classScore;
  }
}
// build/src/classes/index.js
// build/src/factories/WithFaceDetection.js
function isWithFaceDetection(obj) {
  return obj["detection"] instanceof FaceDetection;
}
function extendWithFaceDetection(sourceObj, detection) {
  const extension = {detection};
  return Object.assign({}, sourceObj, extension);
}
// build/src/env/createBrowserEnv.js
function createBrowserEnv() {
  const fetch = window["fetch"] || function() {
    throw new Error("fetch - missing fetch implementation for browser environment");
  };
  const readFile = function() {
    throw new Error("readFile - filesystem not available for browser environment");
  };
  return {
    Canvas: HTMLCanvasElement,
    CanvasRenderingContext2D,
    Image: HTMLImageElement,
    ImageData,
    Video: HTMLVideoElement,
    createCanvasElement: () => document.createElement("canvas"),
    createImageElement: () => document.createElement("img"),
    fetch,
    readFile
  };
}
// build/src/env/createFileSystem.js
function createFileSystem(fs) {
  let requireFsError = "";
  if (!fs) {
    try {
      fs = require("fs");
    } catch (err) {
      requireFsError = err.toString();
    }
  }
  const readFile = fs ? function(filePath) {
    return new Promise((res, rej) => {
      fs.readFile(filePath, function(err, buffer) {
        return err ? rej(err) : res(buffer);
      });
    });
  } : function() {
    throw new Error(`readFile - failed to require fs in nodejs environment with error: ${requireFsError}`);
  };
  return {
    readFile
  };
}
// build/src/env/createNodejsEnv.js
function createNodejsEnv() {
  const Canvas = global["Canvas"] || global["HTMLCanvasElement"];
  const Image = global["Image"] || global["HTMLImageElement"];
  const createCanvasElement = function() {
    if (Canvas) {
      return new Canvas();
    }
    throw new Error("createCanvasElement - missing Canvas implementation for nodejs environment");
  };
  const createImageElement = function() {
    if (Image) {
      return new Image();
    }
    throw new Error("createImageElement - missing Image implementation for nodejs environment");
  };
  const fetch = global["fetch"] || function() {
    throw new Error("fetch - missing fetch implementation for nodejs environment");
  };
  const fileSystem = createFileSystem();
  return {
    Canvas: Canvas || class {
    },
    CanvasRenderingContext2D: global["CanvasRenderingContext2D"] || class {
    },
    Image: Image || class {
    },
    ImageData: global["ImageData"] || class {
    },
    Video: global["HTMLVideoElement"] || class {
    },
    createCanvasElement,
    createImageElement,
    fetch,
    ...fileSystem
  };
}
// build/src/env/isBrowser.js
function isBrowser() {
  return typeof window === "object" && typeof document !== "undefined" && typeof HTMLImageElement !== "undefined" && typeof HTMLCanvasElement !== "undefined" && typeof HTMLVideoElement !== "undefined" && typeof ImageData !== "undefined" && typeof CanvasRenderingContext2D !== "undefined";
}
// build/src/env/types.js
// build/src/env/index.js
const isNodejs = __toModule(require_isNodejs());
let environment;
function getEnv() {
  if (!environment) {
    throw new Error("getEnv - environment is not defined, check isNodejs() and isBrowser()");
  }
  return environment;
}
function setEnv(env16) {
  environment = env16;
}
function initialize() {
  if (isBrowser()) {
    return setEnv(createBrowserEnv());
  }
  if (isNodejs.isNodejs()) {
    return setEnv(createNodejsEnv());
  }
}
function monkeyPatch(env16) {
  if (!environment) {
    initialize();
  }
  if (!environment) {
    throw new Error("monkeyPatch - environment is not defined, check isNodejs() and isBrowser()");
  }
  const {Canvas = environment.Canvas, Image = environment.Image} = env16;
  environment.Canvas = Canvas;
  environment.Image = Image;
  environment.createCanvasElement = env16.createCanvasElement || (() => new Canvas());
  environment.createImageElement = env16.createImageElement || (() => new Image());
  environment.ImageData = env16.ImageData || environment.ImageData;
  environment.Video = env16.Video || environment.Video;
  environment.fetch = env16.fetch || environment.fetch;
  environment.readFile = env16.readFile || environment.readFile;
}
const env = {
  getEnv,
  setEnv,
  initialize,
  createBrowserEnv,
  createFileSystem,
  createNodejsEnv,
  monkeyPatch,
  isBrowser,
  isNodejs: isNodejs.isNodejs
};
initialize();
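// Usage sketch (illustrative): in nodejs a canvas implementation can be supplied via
// monkeyPatch, e.g. with the node-canvas package (assumed to be installed separately):
//   const { Canvas, Image, ImageData } = require("canvas");
//   env.monkeyPatch({ Canvas, Image, ImageData });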
// build/src/dom/resolveInput.js
function resolveInput(arg) {
  if (!env.isNodejs() && typeof arg === "string") {
    return document.getElementById(arg);
  }
  return arg;
}
// build/src/dom/getContext2dOrThrow.js
function getContext2dOrThrow(canvasArg) {
  const {Canvas, CanvasRenderingContext2D: CanvasRenderingContext2D2} = env.getEnv();
  if (canvasArg instanceof CanvasRenderingContext2D2) {
    return canvasArg;
  }
  const canvas = resolveInput(canvasArg);
  if (!(canvas instanceof Canvas)) {
    throw new Error("resolveContext2d - expected canvas to be of instance of Canvas");
  }
  const ctx = canvas.getContext("2d");
  if (!ctx) {
    throw new Error("resolveContext2d - canvas 2d context is null");
  }
  return ctx;
}
// build/src/draw/DrawTextField.js
var AnchorPosition;
(function(AnchorPosition2) {
  AnchorPosition2["TOP_LEFT"] = "TOP_LEFT";
  AnchorPosition2["TOP_RIGHT"] = "TOP_RIGHT";
  AnchorPosition2["BOTTOM_LEFT"] = "BOTTOM_LEFT";
  AnchorPosition2["BOTTOM_RIGHT"] = "BOTTOM_RIGHT";
})(AnchorPosition || (AnchorPosition = {}));
class DrawTextFieldOptions {
  constructor(options = {}) {
    const {anchorPosition, backgroundColor, fontColor, fontSize, fontStyle, padding} = options;
    this.anchorPosition = anchorPosition || AnchorPosition.TOP_LEFT;
    this.backgroundColor = backgroundColor || "rgba(0, 0, 0, 0.5)";
    this.fontColor = fontColor || "rgba(255, 255, 255, 1)";
    this.fontSize = fontSize || 14;
    this.fontStyle = fontStyle || "Georgia";
    this.padding = padding || 4;
  }
}
class DrawTextField {
  constructor(text, anchor, options = {}) {
    this.text = typeof text === "string" ? [text] : text instanceof DrawTextField ? text.text : text;
    this.anchor = anchor;
    this.options = new DrawTextFieldOptions(options);
  }
  measureWidth(ctx) {
    const {padding} = this.options;
    return this.text.map((l) => ctx.measureText(l).width).reduce((w0, w1) => w0 < w1 ? w1 : w0, 0) + 2 * padding;
  }
  measureHeight() {
    const {fontSize, padding} = this.options;
    return this.text.length * fontSize + 2 * padding;
  }
  getUpperLeft(ctx, canvasDims) {
    const {anchorPosition} = this.options;
    const isShiftLeft = anchorPosition === AnchorPosition.BOTTOM_RIGHT || anchorPosition === AnchorPosition.TOP_RIGHT;
    const isShiftTop = anchorPosition === AnchorPosition.BOTTOM_LEFT || anchorPosition === AnchorPosition.BOTTOM_RIGHT;
    const textFieldWidth = this.measureWidth(ctx);
    const textFieldHeight = this.measureHeight();
    const x = isShiftLeft ? this.anchor.x - textFieldWidth : this.anchor.x;
    const y = isShiftTop ? this.anchor.y - textFieldHeight : this.anchor.y;
    if (canvasDims) {
      const {width, height} = canvasDims;
      const newX = Math.max(Math.min(x, width - textFieldWidth), 0);
      const newY = Math.max(Math.min(y, height - textFieldHeight), 0);
      return {x: newX, y: newY};
    }
    return {x, y};
  }
  draw(canvasArg) {
    const canvas = resolveInput(canvasArg);
    const ctx = getContext2dOrThrow(canvas);
    const {backgroundColor, fontColor, fontSize, fontStyle, padding} = this.options;
    ctx.font = `${fontSize}px ${fontStyle}`;
    const maxTextWidth = this.measureWidth(ctx);
    const textHeight = this.measureHeight();
    ctx.fillStyle = backgroundColor;
    const upperLeft = this.getUpperLeft(ctx, canvas);
    ctx.fillRect(upperLeft.x, upperLeft.y, maxTextWidth, textHeight);
    ctx.fillStyle = fontColor;
    this.text.forEach((textLine, i) => {
      const x = padding + upperLeft.x;
      const y = padding + upperLeft.y + (i + 1) * fontSize;
      ctx.fillText(textLine, x, y);
    });
  }
}
// build/src/draw/DrawBox.js
class DrawBoxOptions {
  constructor(options = {}) {
    const {boxColor, lineWidth, label, drawLabelOptions} = options;
    this.boxColor = boxColor || "rgba(0, 0, 255, 1)";
    this.lineWidth = lineWidth || 2;
    this.label = label;
    const defaultDrawLabelOptions = {
      anchorPosition: AnchorPosition.BOTTOM_LEFT,
      backgroundColor: this.boxColor
    };
    this.drawLabelOptions = new DrawTextFieldOptions(Object.assign({}, defaultDrawLabelOptions, drawLabelOptions));
  }
}
class DrawBox {
  constructor(box, options = {}) {
    this.box = new Box(box);
    this.options = new DrawBoxOptions(options);
  }
  draw(canvasArg) {
    const ctx = getContext2dOrThrow(canvasArg);
    const {boxColor, lineWidth} = this.options;
    const {x, y, width, height} = this.box;
    ctx.strokeStyle = boxColor;
    ctx.lineWidth = lineWidth;
    ctx.strokeRect(x, y, width, height);
    const {label} = this.options;
    if (label) {
      new DrawTextField([label], {x: x - lineWidth / 2, y}, this.options.drawLabelOptions).draw(canvasArg);
    }
  }
}
// build/src/draw/drawDetections.js
function drawDetections(canvasArg, detections) {
  const detectionsArray = Array.isArray(detections) ? detections : [detections];
  detectionsArray.forEach((det) => {
    const score = det instanceof FaceDetection ? det.score : isWithFaceDetection(det) ? det.detection.score : void 0;
    const box = det instanceof FaceDetection ? det.box : isWithFaceDetection(det) ? det.detection.box : new Box(det);
    const label = score ? `${round(score)}` : void 0;
    new DrawBox(box, {label}).draw(canvasArg);
  });
}
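// Usage sketch (illustrative): drawDetections accepts a FaceDetection, a result object
// carrying a detection, or a plain rect, and labels each box with its rounded score, e.g.:
//   drawDetections(overlayCanvas, detections);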
// build/src/dom/isMediaLoaded.js
function isMediaLoaded(media) {
  const {Image, Video} = env.getEnv();
  return media instanceof Image && media.complete || media instanceof Video && media.readyState >= 3;
}
// build/src/dom/awaitMediaLoaded.js
function awaitMediaLoaded(media) {
  return new Promise((resolve, reject) => {
    if (media instanceof env.getEnv().Canvas || isMediaLoaded(media)) {
      return resolve(null);
    }
    function onLoad(e) {
      if (!e.currentTarget)
        return;
      e.currentTarget.removeEventListener("load", onLoad);
      e.currentTarget.removeEventListener("error", onError);
      resolve(e);
    }
    function onError(e) {
      if (!e.currentTarget)
        return;
      e.currentTarget.removeEventListener("load", onLoad);
      e.currentTarget.removeEventListener("error", onError);
      reject(e);
    }
    media.addEventListener("load", onLoad);
    media.addEventListener("error", onError);
  });
}
// build/src/dom/bufferToImage.js
function bufferToImage(buf) {
  return new Promise((resolve, reject) => {
    if (!(buf instanceof Blob)) {
      return reject("bufferToImage - expected buf to be of type: Blob");
    }
    const reader = new FileReader();
    reader.onload = () => {
      if (typeof reader.result !== "string") {
        return reject("bufferToImage - expected reader.result to be a string, in onload");
      }
      const img = env.getEnv().createImageElement();
      img.onload = () => resolve(img);
      img.onerror = reject;
      img.src = reader.result;
    };
    reader.onerror = reject;
    reader.readAsDataURL(buf);
  });
}
// build/src/dom/getMediaDimensions.js
function getMediaDimensions(input) {
  const {Image, Video} = env.getEnv();
  if (input instanceof Image) {
    return new Dimensions(input.naturalWidth, input.naturalHeight);
  }
  if (input instanceof Video) {
    return new Dimensions(input.videoWidth, input.videoHeight);
  }
  return new Dimensions(input.width, input.height);
}
// build/src/dom/createCanvas.js
function createCanvas({width, height}) {
  const {createCanvasElement} = env.getEnv();
  const canvas = createCanvasElement();
  canvas.width = width;
  canvas.height = height;
  return canvas;
}
function createCanvasFromMedia(media, dims) {
  const {ImageData: ImageData2} = env.getEnv();
  if (!(media instanceof ImageData2) && !isMediaLoaded(media)) {
    throw new Error("createCanvasFromMedia - media has not finished loading yet");
  }
  const {width, height} = dims || getMediaDimensions(media);
  const canvas = createCanvas({width, height});
  if (media instanceof ImageData2) {
    getContext2dOrThrow(canvas).putImageData(media, 0, 0);
  } else {
    getContext2dOrThrow(canvas).drawImage(media, 0, 0, width, height);
  }
  return canvas;
}
// build/src/dom/imageTensorToCanvas.js
import {
  browser,
  tidy as tidy3
} from "@tensorflow/tfjs";
async function imageTensorToCanvas(imgTensor, canvas) {
  const targetCanvas = canvas || env.getEnv().createCanvasElement();
  const [height, width, numChannels] = imgTensor.shape.slice(isTensor4D(imgTensor) ? 1 : 0);
  const imgTensor3D = tidy3(() => imgTensor.as3D(height, width, numChannels).toInt());
  await browser.toPixels(imgTensor3D, targetCanvas);
  imgTensor3D.dispose();
  return targetCanvas;
}
// build/src/dom/isMediaElement.js
function isMediaElement(input) {
  const {Image, Canvas, Video} = env.getEnv();
  return input instanceof Image || input instanceof Canvas || input instanceof Video;
}
// build/src/dom/imageToSquare.js
function imageToSquare(input, inputSize, centerImage = false) {
  const {Image, Canvas} = env.getEnv();
  if (!(input instanceof Image || input instanceof Canvas)) {
    throw new Error("imageToSquare - expected arg0 to be HTMLImageElement | HTMLCanvasElement");
  }
  const dims = getMediaDimensions(input);
  const scale2 = inputSize / Math.max(dims.height, dims.width);
  const width = scale2 * dims.width;
  const height = scale2 * dims.height;
  const targetCanvas = createCanvas({width: inputSize, height: inputSize});
  const inputCanvas = input instanceof Canvas ? input : createCanvasFromMedia(input);
  const offset = Math.abs(width - height) / 2;
  const dx = centerImage && width < height ? offset : 0;
  const dy = centerImage && height < width ? offset : 0;
  getContext2dOrThrow(targetCanvas).drawImage(inputCanvas, dx, dy, width, height);
  return targetCanvas;
}
// build/src/dom/NetInput.js
import {
  Tensor as Tensor2,
  browser as browser2,
  cast as cast2,
  image,
  stack,
  tidy as tidy4
} from "@tensorflow/tfjs";
class NetInput {
  constructor(inputs, treatAsBatchInput = false) {
    this._imageTensors = [];
    this._canvases = [];
    this._treatAsBatchInput = false;
    this._inputDimensions = [];
    if (!Array.isArray(inputs)) {
      throw new Error(`NetInput.constructor - expected inputs to be an Array of TResolvedNetInput or to be instanceof tf.Tensor4D, instead have ${inputs}`);
    }
    this._treatAsBatchInput = treatAsBatchInput;
    this._batchSize = inputs.length;
    inputs.forEach((input, idx) => {
      if (isTensor3D(input)) {
        this._imageTensors[idx] = input;
        this._inputDimensions[idx] = input.shape;
        return;
      }
      if (isTensor4D(input)) {
        const batchSize = input.shape[0];
        if (batchSize !== 1) {
          throw new Error(`NetInput - tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`);
        }
        this._imageTensors[idx] = input;
        this._inputDimensions[idx] = input.shape.slice(1);
        return;
      }
      const canvas = input instanceof env.getEnv().Canvas ? input : createCanvasFromMedia(input);
      this._canvases[idx] = canvas;
      this._inputDimensions[idx] = [canvas.height, canvas.width, 3];
    });
  }
  get imageTensors() {
    return this._imageTensors;
  }
  get canvases() {
    return this._canvases;
  }
  get isBatchInput() {
    return this.batchSize > 1 || this._treatAsBatchInput;
  }
  get batchSize() {
    return this._batchSize;
  }
  get inputDimensions() {
    return this._inputDimensions;
  }
  get inputSize() {
    return this._inputSize;
  }
  get reshapedInputDimensions() {
    return range(this.batchSize, 0, 1).map((_, batchIdx) => this.getReshapedInputDimensions(batchIdx));
  }
  getInput(batchIdx) {
    return this.canvases[batchIdx] || this.imageTensors[batchIdx];
  }
  getInputDimensions(batchIdx) {
    return this._inputDimensions[batchIdx];
  }
  getInputHeight(batchIdx) {
    return this._inputDimensions[batchIdx][0];
  }
  getInputWidth(batchIdx) {
    return this._inputDimensions[batchIdx][1];
  }
  getReshapedInputDimensions(batchIdx) {
    if (typeof this.inputSize !== "number") {
      throw new Error("getReshapedInputDimensions - inputSize not set, toBatchTensor has not been called yet");
    }
    const width = this.getInputWidth(batchIdx);
    const height = this.getInputHeight(batchIdx);
    return computeReshapedDimensions({width, height}, this.inputSize);
  }
  toBatchTensor(inputSize, isCenterInputs = true) {
    this._inputSize = inputSize;
    return tidy4(() => {
      const inputTensors = range(this.batchSize, 0, 1).map((batchIdx) => {
        const input = this.getInput(batchIdx);
        if (input instanceof Tensor2) {
          let imgTensor = isTensor4D(input) ? input : input.expandDims();
          imgTensor = padToSquare(imgTensor, isCenterInputs);
          if (imgTensor.shape[1] !== inputSize || imgTensor.shape[2] !== inputSize) {
            imgTensor = image.resizeBilinear(imgTensor, [inputSize, inputSize]);
          }
          return imgTensor.as3D(inputSize, inputSize, 3);
        }
        if (input instanceof env.getEnv().Canvas) {
          return browser2.fromPixels(imageToSquare(input, inputSize, isCenterInputs));
        }
        throw new Error(`toBatchTensor - at batchIdx ${batchIdx}, expected input to be instanceof tf.Tensor or instanceof HTMLCanvasElement, instead have ${input}`);
      });
      const batchTensor = stack(inputTensors.map((t) => cast2(t, "float32"))).as4D(this.batchSize, inputSize, inputSize, 3);
      return batchTensor;
    });
  }
}
// build/src/dom/toNetInput.js
async function toNetInput(inputs) {
  if (inputs instanceof NetInput) {
    return inputs;
  }
  let inputArgArray = Array.isArray(inputs) ? inputs : [inputs];
  if (!inputArgArray.length) {
    throw new Error("toNetInput - empty array passed as input");
  }
  const getIdxHint = (idx) => Array.isArray(inputs) ? ` at input index ${idx}:` : "";
  const inputArray = inputArgArray.map(resolveInput);
  inputArray.forEach((input, i) => {
    if (!isMediaElement(input) && !isTensor3D(input) && !isTensor4D(input)) {
      if (typeof inputArgArray[i] === "string") {
        throw new Error(`toNetInput - ${getIdxHint(i)} string passed, but could not resolve HTMLElement for element id ${inputArgArray[i]}`);
      }
      throw new Error(`toNetInput - ${getIdxHint(i)} expected media to be of type HTMLImageElement | HTMLVideoElement | HTMLCanvasElement | tf.Tensor3D, or to be an element id`);
    }
    if (isTensor4D(input)) {
      const batchSize = input.shape[0];
      if (batchSize !== 1) {
        throw new Error(`toNetInput - ${getIdxHint(i)} tf.Tensor4D with batchSize ${batchSize} passed, but not supported in input array`);
      }
    }
  });
  await Promise.all(inputArray.map((input) => isMediaElement(input) && awaitMediaLoaded(input)));
  return new NetInput(inputArray, Array.isArray(inputs));
}
// build/src/dom/extractFaces.js
async function extractFaces(input, detections) {
  const {Canvas} = env.getEnv();
  let canvas = input;
  if (!(input instanceof Canvas)) {
    const netInput = await toNetInput(input);
    if (netInput.batchSize > 1) {
      throw new Error("extractFaces - batchSize > 1 not supported");
    }
    const tensorOrCanvas = netInput.getInput(0);
    canvas = tensorOrCanvas instanceof Canvas ? tensorOrCanvas : await imageTensorToCanvas(tensorOrCanvas);
  }
  const ctx = getContext2dOrThrow(canvas);
  const boxes = detections.map((det) => det instanceof FaceDetection ? det.forSize(canvas.width, canvas.height).box.floor() : det).map((box) => box.clipAtImageBorders(canvas.width, canvas.height));
  return boxes.map(({x, y, width, height}) => {
    const faceImg = createCanvas({width, height});
    getContext2dOrThrow(faceImg).putImageData(ctx.getImageData(x, y, width, height), 0, 0);
    return faceImg;
  });
}
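// Usage sketch (illustrative): extractFaces crops the detected face regions out of the
// input image and returns one canvas per detection, e.g.:
//   const faceCanvases = await extractFaces(imgEl, detections);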
// build/src/dom/extractFaceTensors.js
import {
  slice3d,
  tidy as tidy5
} from "@tensorflow/tfjs";
async function extractFaceTensors(imageTensor, detections) {
  if (!isTensor3D(imageTensor) && !isTensor4D(imageTensor)) {
    throw new Error("extractFaceTensors - expected image tensor to be 3D or 4D");
  }
  if (isTensor4D(imageTensor) && imageTensor.shape[0] > 1) {
    throw new Error("extractFaceTensors - batchSize > 1 not supported");
  }
  return tidy5(() => {
    const [imgHeight, imgWidth, numChannels] = imageTensor.shape.slice(isTensor4D(imageTensor) ? 1 : 0);
    const boxes = detections.map((det) => det instanceof FaceDetection ? det.forSize(imgWidth, imgHeight).box : det).map((box) => box.clipAtImageBorders(imgWidth, imgHeight));
    const faceTensors = boxes.map(({x, y, width, height}) => slice3d(imageTensor.as3D(imgHeight, imgWidth, numChannels), [y, x, 0], [height, width, numChannels]));
    return faceTensors;
  });
}
// build/src/dom/fetchOrThrow.js
async function fetchOrThrow(url, init) {
  const fetch = env.getEnv().fetch;
  const res = await fetch(url, init);
  if (!(res.status < 400)) {
    throw new Error(`failed to fetch: (${res.status}) ${res.statusText}, from url: ${res.url}`);
  }
  return res;
}
// build/src/dom/fetchImage.js
async function fetchImage(uri) {
  const res = await fetchOrThrow(uri);
  const blob = await res.blob();
  if (!blob.type.startsWith("image/")) {
    throw new Error(`fetchImage - expected blob type to be of type image/*, instead have: ${blob.type}, for url: ${res.url}`);
  }
  return bufferToImage(blob);
}
// build/src/dom/fetchJson.js
async function fetchJson(uri) {
  return (await fetchOrThrow(uri)).json();
}
// build/src/dom/fetchNetWeights.js
async function fetchNetWeights(uri) {
  return new Float32Array(await (await fetchOrThrow(uri)).arrayBuffer());
}
// build/src/common/getModelUris.js
function getModelUris(uri, defaultModelName) {
  const defaultManifestFilename = `${defaultModelName}-weights_manifest.json`;
  if (!uri) {
    return {
      modelBaseUri: "",
      manifestUri: defaultManifestFilename
    };
  }
  if (uri === "/") {
    return {
      modelBaseUri: "/",
      manifestUri: `/${defaultManifestFilename}`
    };
  }
  const protocol = uri.startsWith("http://") ? "http://" : uri.startsWith("https://") ? "https://" : "";
  uri = uri.replace(protocol, "");
  const parts = uri.split("/").filter((s) => s);
  const manifestFile = uri.endsWith(".json") ? parts[parts.length - 1] : defaultManifestFilename;
  let modelBaseUri = protocol + (uri.endsWith(".json") ? parts.slice(0, parts.length - 1) : parts).join("/");
  modelBaseUri = uri.startsWith("/") ? `/${modelBaseUri}` : modelBaseUri;
  return {
    modelBaseUri,
    manifestUri: modelBaseUri === "/" ? `/${manifestFile}` : `${modelBaseUri}/${manifestFile}`
  };
}
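// Illustrative (model name is an assumption for the example): getModelUris resolves
// where the weights manifest lives relative to the given uri, e.g.:
//   getModelUris("/models", "tiny_face_detector_model");
//   // { modelBaseUri: "/models", manifestUri: "/models/tiny_face_detector_model-weights_manifest.json" }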
// build/src/dom/loadWeightMap.js
import {
  io
} from "@tensorflow/tfjs";
async function loadWeightMap(uri, defaultModelName) {
  const {manifestUri, modelBaseUri} = getModelUris(uri, defaultModelName);
  let manifest = await fetchJson(manifestUri);
  return io.loadWeights(manifest, modelBaseUri);
}
// build/src/dom/matchDimensions.js
function matchDimensions(input, reference, useMediaDimensions = false) {
  const {width, height} = useMediaDimensions ? getMediaDimensions(reference) : reference;
  input.width = width;
  input.height = height;
  return {width, height};
}
// build/src/dom/types.js
// build/src/dom/index.js
// build/src/NeuralNetwork.js
import {
  Tensor as Tensor3,
  Variable,
  io as io2,
  tensor
} from "@tensorflow/tfjs";
class NeuralNetwork {
  constructor(_name) {
    this._name = _name;
    this._params = void 0;
    this._paramMappings = [];
  }
  get params() {
    return this._params;
  }
  get paramMappings() {
    return this._paramMappings;
  }
  get isLoaded() {
    return !!this.params;
  }
  getParamFromPath(paramPath) {
    const {obj, objProp} = this.traversePropertyPath(paramPath);
    return obj[objProp];
  }
  reassignParamFromPath(paramPath, tensor2) {
    const {obj, objProp} = this.traversePropertyPath(paramPath);
    obj[objProp].dispose();
    obj[objProp] = tensor2;
  }
  getParamList() {
    return this._paramMappings.map(({paramPath}) => ({
      path: paramPath,
      tensor: this.getParamFromPath(paramPath)
    }));
  }
  getTrainableParams() {
    return this.getParamList().filter((param) => param.tensor instanceof Variable);
  }
  getFrozenParams() {
    return this.getParamList().filter((param) => !(param.tensor instanceof Variable));
  }
  variable() {
    this.getFrozenParams().forEach(({path, tensor: tensor2}) => {
      this.reassignParamFromPath(path, tensor2.variable());
    });
  }
  freeze() {
    this.getTrainableParams().forEach(({path, tensor: variable}) => {
      const tensor2 = tensor(variable.dataSync());
      variable.dispose();
      this.reassignParamFromPath(path, tensor2);
    });
  }
  dispose(throwOnRedispose = true) {
    this.getParamList().forEach((param) => {
      if (throwOnRedispose && param.tensor.isDisposed) {
        throw new Error(`param tensor has already been disposed for path ${param.path}`);
      }
      param.tensor.dispose();
    });
    this._params = void 0;
  }
  serializeParams() {
    return new Float32Array(this.getParamList().map(({tensor: tensor2}) => Array.from(tensor2.dataSync())).reduce((flat, arr) => flat.concat(arr)));
  }
  async load(weightsOrUrl) {
    if (weightsOrUrl instanceof Float32Array) {
      this.extractWeights(weightsOrUrl);
      return;
    }
    await this.loadFromUri(weightsOrUrl);
  }
  async loadFromUri(uri) {
    if (uri && typeof uri !== "string") {
      throw new Error(`${this._name}.loadFromUri - expected model uri`);
    }
    const weightMap = await loadWeightMap(uri, this.getDefaultModelName());
    this.loadFromWeightMap(weightMap);
  }
  async loadFromDisk(filePath) {
    if (filePath && typeof filePath !== "string") {
      throw new Error(`${this._name}.loadFromDisk - expected model file path`);
    }
    const {readFile} = env.getEnv();
    const {manifestUri, modelBaseUri} = getModelUris(filePath, this.getDefaultModelName());
    const fetchWeightsFromDisk = (filePaths) => Promise.all(filePaths.map((filePath2) => readFile(filePath2).then((buf) => buf.buffer)));
    const loadWeights = io2.weightsLoaderFactory(fetchWeightsFromDisk);
    const manifest = JSON.parse((await readFile(manifestUri)).toString());
    const weightMap = await loadWeights(manifest, modelBaseUri);
    this.loadFromWeightMap(weightMap);
  }
  loadFromWeightMap(weightMap) {
    const {paramMappings, params} = this.extractParamsFromWeigthMap(weightMap);
    this._paramMappings = paramMappings;
    this._params = params;
  }
  extractWeights(weights) {
    const {paramMappings, params} = this.extractParams(weights);
    this._paramMappings = paramMappings;
    this._params = params;
  }
  traversePropertyPath(paramPath) {
    if (!this.params) {
      throw new Error(`traversePropertyPath - model has no loaded params`);
    }
    const result = paramPath.split("/").reduce((res, objProp2) => {
      if (!res.nextObj.hasOwnProperty(objProp2)) {
        throw new Error(`traversePropertyPath - object does not have property ${objProp2}, for path ${paramPath}`);
      }
      return {obj: res.nextObj, objProp: objProp2, nextObj: res.nextObj[objProp2]};
    }, {nextObj: this.params});
    const {obj, objProp} = result;
    if (!obj || !objProp || !(obj[objProp] instanceof Tensor3)) {
      throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`);
    }
    return {obj, objProp};
  }
}
// build/src/common/depthwiseSeparableConv.js
import {
  add,
  separableConv2d,
  tidy as tidy6
} from "@tensorflow/tfjs";
function depthwiseSeparableConv(x, params, stride) {
  return tidy6(() => {
    let out = separableConv2d(x, params.depthwise_filter, params.pointwise_filter, stride, "same");
    out = add(out, params.bias);
    return out;
  });
}
// build/src/faceFeatureExtractor/denseBlock.js
import {
  add as add2,
  conv2d,
  relu,
  tidy as tidy7
} from "@tensorflow/tfjs";
function denseBlock3(x, denseBlockParams, isFirstLayer = false) {
  return tidy7(() => {
    const out1 = relu(isFirstLayer ? add2(conv2d(x, denseBlockParams.conv0.filters, [2, 2], "same"), denseBlockParams.conv0.bias) : depthwiseSeparableConv(x, denseBlockParams.conv0, [2, 2]));
    const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1]);
    const in3 = relu(add2(out1, out2));
    const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1]);
    return relu(add2(out1, add2(out2, out3)));
  });
}
function denseBlock4(x, denseBlockParams, isFirstLayer = false, isScaleDown = true) {
  return tidy7(() => {
    const out1 = relu(isFirstLayer ? add2(conv2d(x, denseBlockParams.conv0.filters, isScaleDown ? [2, 2] : [1, 1], "same"), denseBlockParams.conv0.bias) : depthwiseSeparableConv(x, denseBlockParams.conv0, isScaleDown ? [2, 2] : [1, 1]));
    const out2 = depthwiseSeparableConv(out1, denseBlockParams.conv1, [1, 1]);
    const in3 = relu(add2(out1, out2));
    const out3 = depthwiseSeparableConv(in3, denseBlockParams.conv2, [1, 1]);
    const in4 = relu(add2(out1, add2(out2, out3)));
    const out4 = depthwiseSeparableConv(in4, denseBlockParams.conv3, [1, 1]);
    return relu(add2(out1, add2(out2, add2(out3, out4))));
  });
}
// build/src/common/convLayer.js
import {
  add as add3,
  conv2d as conv2d2,
  relu as relu2,
  tidy as tidy8
} from "@tensorflow/tfjs";
function convLayer(x, params, padding = "same", withRelu = false) {
  return tidy8(() => {
    const out = add3(conv2d2(x, params.filters, [1, 1], padding), params.bias);
    return withRelu ? relu2(out) : out;
  });
}
// build/src/common/disposeUnusedWeightTensors.js
function disposeUnusedWeightTensors ( weightMap , paramMappings ) {
Object . keys ( weightMap ) . forEach ( ( path ) => {
if ( ! paramMappings . some ( ( pm ) => pm . originalPath === path ) ) {
weightMap [ path ] . dispose ( ) ;
}
} ) ;
}
// build/src/common/extractConvParamsFactory.js
import {
tensor1d ,
tensor4d
} from "@tensorflow/tfjs" ;
function extractConvParamsFactory ( extractWeights , paramMappings ) {
return function ( channelsIn , channelsOut , filterSize , mappedPrefix ) {
const filters = tensor4d ( extractWeights ( channelsIn * channelsOut * filterSize * filterSize ) , [ filterSize , filterSize , channelsIn , channelsOut ] ) ;
const bias = tensor1d ( extractWeights ( channelsOut ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /filters ` } , { paramPath : ` ${ mappedPrefix } /bias ` } ) ;
return { filters , bias } ;
} ;
}
// build/src/common/extractFCParamsFactory.js
import {
tensor1d as tensor1d2 ,
tensor2d
} from "@tensorflow/tfjs" ;
function extractFCParamsFactory ( extractWeights , paramMappings ) {
return function ( channelsIn , channelsOut , mappedPrefix ) {
const fc _weights = tensor2d ( extractWeights ( channelsIn * channelsOut ) , [ channelsIn , channelsOut ] ) ;
const fc _bias = tensor1d2 ( extractWeights ( channelsOut ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /weights ` } , { paramPath : ` ${ mappedPrefix } /bias ` } ) ;
return {
weights : fc _weights ,
bias : fc _bias
} ;
} ;
}
// build/src/common/types.js
class SeparableConvParams {
constructor ( depthwise _filter , pointwise _filter , bias ) {
this . depthwise _filter = depthwise _filter ;
this . pointwise _filter = pointwise _filter ;
this . bias = bias ;
}
}
// build/src/common/extractSeparableConvParamsFactory.js
import {
tensor1d as tensor1d3 ,
tensor4d as tensor4d2
} from "@tensorflow/tfjs" ;
function extractSeparableConvParamsFactory ( extractWeights , paramMappings ) {
return function ( channelsIn , channelsOut , mappedPrefix ) {
const depthwise _filter = tensor4d2 ( extractWeights ( 3 * 3 * channelsIn ) , [ 3 , 3 , channelsIn , 1 ] ) ;
const pointwise _filter = tensor4d2 ( extractWeights ( channelsIn * channelsOut ) , [ 1 , 1 , channelsIn , channelsOut ] ) ;
const bias = tensor1d3 ( extractWeights ( channelsOut ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /depthwise_filter ` } , { paramPath : ` ${ mappedPrefix } /pointwise_filter ` } , { paramPath : ` ${ mappedPrefix } /bias ` } ) ;
return new SeparableConvParams ( depthwise _filter , pointwise _filter , bias ) ;
} ;
}
function loadSeparableConvParamsFactory ( extractWeightEntry ) {
return function ( prefix ) {
const depthwise _filter = extractWeightEntry ( ` ${ prefix } /depthwise_filter ` , 4 ) ;
const pointwise _filter = extractWeightEntry ( ` ${ prefix } /pointwise_filter ` , 4 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /bias ` , 1 ) ;
return new SeparableConvParams ( depthwise _filter , pointwise _filter , bias ) ;
} ;
}
// build/src/common/extractWeightEntryFactory.js
function extractWeightEntryFactory ( weightMap , paramMappings ) {
return function ( originalPath , paramRank , mappedPath ) {
const tensor2 = weightMap [ originalPath ] ;
if ( ! isTensor ( tensor2 , paramRank ) ) {
throw new Error ( ` expected weightMap[ ${ originalPath } ] to be a Tensor ${ paramRank } D, instead have ${ tensor2 } ` ) ;
}
paramMappings . push ( { originalPath , paramPath : mappedPath || originalPath } ) ;
return tensor2 ;
} ;
}
// build/src/common/extractWeightsFactory.js
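// extractWeightsFactory: wraps a flat weight array in a sequential reader; each call to
// extractWeights(n) consumes the next n values and getRemainingWeights returns what is left.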
function extractWeightsFactory ( weights ) {
let remainingWeights = weights ;
function extractWeights ( numWeights ) {
const ret = remainingWeights . slice ( 0 , numWeights ) ;
remainingWeights = remainingWeights . slice ( numWeights ) ;
return ret ;
}
function getRemainingWeights ( ) {
return remainingWeights ;
}
return {
extractWeights ,
getRemainingWeights
} ;
}
// build/src/common/index.js
// build/src/faceFeatureExtractor/extractorsFactory.js
function extractorsFactory ( extractWeights , paramMappings ) {
const extractConvParams = extractConvParamsFactory ( extractWeights , paramMappings ) ;
const extractSeparableConvParams = extractSeparableConvParamsFactory ( extractWeights , paramMappings ) ;
function extractDenseBlock3Params ( channelsIn , channelsOut , mappedPrefix , isFirstLayer = false ) {
const conv0 = isFirstLayer ? extractConvParams ( channelsIn , channelsOut , 3 , ` ${ mappedPrefix } /conv0 ` ) : extractSeparableConvParams ( channelsIn , channelsOut , ` ${ mappedPrefix } /conv0 ` ) ;
const conv1 = extractSeparableConvParams ( channelsOut , channelsOut , ` ${ mappedPrefix } /conv1 ` ) ;
const conv22 = extractSeparableConvParams ( channelsOut , channelsOut , ` ${ mappedPrefix } /conv2 ` ) ;
return { conv0 , conv1 , conv2 : conv22 } ;
}
function extractDenseBlock4Params ( channelsIn , channelsOut , mappedPrefix , isFirstLayer = false ) {
const { conv0 , conv1 , conv2 : conv22 } = extractDenseBlock3Params ( channelsIn , channelsOut , mappedPrefix , isFirstLayer ) ;
const conv3 = extractSeparableConvParams ( channelsOut , channelsOut , ` ${ mappedPrefix } /conv3 ` ) ;
return { conv0 , conv1 , conv2 : conv22 , conv3 } ;
}
return {
extractDenseBlock3Params ,
extractDenseBlock4Params
} ;
}
// build/src/faceFeatureExtractor/extractParams.js
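// extractParams: slices the flat weight array into the four dense blocks of the feature
// extractor (3 -> 32 -> 64 -> 128 -> 256 channels) and throws if any weights remain unconsumed.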
function extractParams ( weights ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const { extractDenseBlock4Params } = extractorsFactory ( extractWeights , paramMappings ) ;
const dense0 = extractDenseBlock4Params ( 3 , 32 , "dense0" , true ) ;
const dense1 = extractDenseBlock4Params ( 32 , 64 , "dense1" ) ;
const dense2 = extractDenseBlock4Params ( 64 , 128 , "dense2" ) ;
const dense3 = extractDenseBlock4Params ( 128 , 256 , "dense3" ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
paramMappings ,
params : { dense0 , dense1 , dense2 , dense3 }
} ;
}
// build/src/common/loadConvParamsFactory.js
function loadConvParamsFactory ( extractWeightEntry ) {
return function ( prefix ) {
const filters = extractWeightEntry ( ` ${ prefix } /filters ` , 4 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /bias ` , 1 ) ;
return { filters , bias } ;
} ;
}
// build/src/faceFeatureExtractor/loadParamsFactory.js
function loadParamsFactory ( weightMap , paramMappings ) {
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
const extractConvParams = loadConvParamsFactory ( extractWeightEntry ) ;
const extractSeparableConvParams = loadSeparableConvParamsFactory ( extractWeightEntry ) ;
function extractDenseBlock3Params ( prefix , isFirstLayer = false ) {
const conv0 = isFirstLayer ? extractConvParams ( ` ${ prefix } /conv0 ` ) : extractSeparableConvParams ( ` ${ prefix } /conv0 ` ) ;
const conv1 = extractSeparableConvParams ( ` ${ prefix } /conv1 ` ) ;
const conv22 = extractSeparableConvParams ( ` ${ prefix } /conv2 ` ) ;
return { conv0 , conv1 , conv2 : conv22 } ;
}
function extractDenseBlock4Params ( prefix , isFirstLayer = false ) {
const conv0 = isFirstLayer ? extractConvParams ( ` ${ prefix } /conv0 ` ) : extractSeparableConvParams ( ` ${ prefix } /conv0 ` ) ;
const conv1 = extractSeparableConvParams ( ` ${ prefix } /conv1 ` ) ;
const conv22 = extractSeparableConvParams ( ` ${ prefix } /conv2 ` ) ;
const conv3 = extractSeparableConvParams ( ` ${ prefix } /conv3 ` ) ;
return { conv0 , conv1 , conv2 : conv22 , conv3 } ;
}
return {
extractDenseBlock3Params ,
extractDenseBlock4Params
} ;
}
// build/src/faceFeatureExtractor/extractParamsFromWeigthMap.js
function extractParamsFromWeigthMap ( weightMap ) {
const paramMappings = [ ] ;
const { extractDenseBlock4Params } = loadParamsFactory ( weightMap , paramMappings ) ;
const params = {
dense0 : extractDenseBlock4Params ( "dense0" , true ) ,
dense1 : extractDenseBlock4Params ( "dense1" ) ,
dense2 : extractDenseBlock4Params ( "dense2" ) ,
dense3 : extractDenseBlock4Params ( "dense3" )
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/faceFeatureExtractor/FaceFeatureExtractor.js
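// FaceFeatureExtractor: resizes the input to a 112x112 batch, normalizes it with the mean RGB
// values and divides by 255, then runs four dense blocks followed by 7x7 average pooling.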
import {
avgPool ,
scalar ,
tidy as tidy9
} from "@tensorflow/tfjs" ;
class FaceFeatureExtractor extends NeuralNetwork {
constructor ( ) {
super ( "FaceFeatureExtractor" ) ;
}
forwardInput ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "FaceFeatureExtractor - load model before inference" ) ;
}
return tidy9 ( ( ) => {
const batchTensor = input . toBatchTensor ( 112 , true ) ;
const meanRgb = [ 122.782 , 117.001 , 104.298 ] ;
const normalized = normalize ( batchTensor , meanRgb ) . div ( scalar ( 255 ) ) ;
let out = denseBlock4 ( normalized , params . dense0 , true ) ;
out = denseBlock4 ( out , params . dense1 ) ;
out = denseBlock4 ( out , params . dense2 ) ;
out = denseBlock4 ( out , params . dense3 ) ;
out = avgPool ( out , [ 7 , 7 ] , [ 2 , 2 ] , "valid" ) ;
return out ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
getDefaultModelName ( ) {
return "face_feature_extractor_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMap ( weightMap ) ;
}
extractParams ( weights ) {
return extractParams ( weights ) ;
}
}
// build/src/common/fullyConnectedLayer.js
import {
add as add4 ,
matMul ,
tidy as tidy10
} from "@tensorflow/tfjs" ;
function fullyConnectedLayer ( x , params ) {
return tidy10 ( ( ) => add4 ( matMul ( x , params . weights ) , params . bias ) ) ;
}
// build/src/faceProcessor/extractParams.js
function extractParams3 ( weights , channelsIn , channelsOut ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const extractFCParams = extractFCParamsFactory ( extractWeights , paramMappings ) ;
const fc = extractFCParams ( channelsIn , channelsOut , "fc" ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
paramMappings ,
params : { fc }
} ;
}
// build/src/faceProcessor/extractParamsFromWeigthMap.js
function extractParamsFromWeigthMap3 ( weightMap ) {
const paramMappings = [ ] ;
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
function extractFcParams ( prefix ) {
const weights = extractWeightEntry ( ` ${ prefix } /weights ` , 2 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /bias ` , 1 ) ;
return { weights , bias } ;
}
const params = {
fc : extractFcParams ( "fc" )
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/faceProcessor/util.js
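// seperateWeightMaps: splits a weight map into feature extractor entries and classifier entries
// (keys starting with "fc").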
function seperateWeightMaps ( weightMap ) {
const featureExtractorMap = { } ;
const classifierMap = { } ;
Object . keys ( weightMap ) . forEach ( ( key ) => {
const map = key . startsWith ( "fc" ) ? classifierMap : featureExtractorMap ;
map [ key ] = weightMap [ key ] ;
} ) ;
return { featureExtractorMap , classifierMap } ;
}
// build/src/faceProcessor/FaceProcessor.js
import {
tidy as tidy11
} from "@tensorflow/tfjs" ;
class FaceProcessor extends NeuralNetwork {
constructor ( _name , faceFeatureExtractor ) {
super ( _name ) ;
this . _faceFeatureExtractor = faceFeatureExtractor ;
}
get faceFeatureExtractor ( ) {
return this . _faceFeatureExtractor ;
}
runNet ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( ` ${ this . _name } - load model before inference ` ) ;
}
return tidy11 ( ( ) => {
const bottleneckFeatures = input instanceof NetInput ? this . faceFeatureExtractor . forwardInput ( input ) : input ;
return fullyConnectedLayer ( bottleneckFeatures . as2D ( bottleneckFeatures . shape [ 0 ] , - 1 ) , params . fc ) ;
} ) ;
}
dispose ( throwOnRedispose = true ) {
this . faceFeatureExtractor . dispose ( throwOnRedispose ) ;
super . dispose ( throwOnRedispose ) ;
}
loadClassifierParams ( weights ) {
const { params , paramMappings } = this . extractClassifierParams ( weights ) ;
this . _params = params ;
this . _paramMappings = paramMappings ;
}
extractClassifierParams ( weights ) {
return extractParams3 ( weights , this . getClassifierChannelsIn ( ) , this . getClassifierChannelsOut ( ) ) ;
}
extractParamsFromWeigthMap ( weightMap ) {
const { featureExtractorMap , classifierMap } = seperateWeightMaps ( weightMap ) ;
this . faceFeatureExtractor . loadFromWeightMap ( featureExtractorMap ) ;
return extractParamsFromWeigthMap3 ( classifierMap ) ;
}
extractParams ( weights ) {
const cIn = this . getClassifierChannelsIn ( ) ;
const cOut = this . getClassifierChannelsOut ( ) ;
const classifierWeightSize = cOut * cIn + cOut ;
const featureExtractorWeights = weights . slice ( 0 , weights . length - classifierWeightSize ) ;
const classifierWeights = weights . slice ( weights . length - classifierWeightSize ) ;
this . faceFeatureExtractor . extractWeights ( featureExtractorWeights ) ;
return this . extractClassifierParams ( classifierWeights ) ;
}
}
// build/src/faceExpressionNet/FaceExpressions.js
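// FaceExpressions: holds one probability per expression label and can return the
// expression/probability pairs sorted by descending probability.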
const FACE _EXPRESSION _LABELS = [ "neutral" , "happy" , "sad" , "angry" , "fearful" , "disgusted" , "surprised" ] ;
class FaceExpressions {
constructor ( probabilities ) {
if ( probabilities . length !== 7 ) {
throw new Error ( ` FaceExpressions.constructor - expected probabilities.length to be 7, have: ${ probabilities . length } ` ) ;
}
FACE _EXPRESSION _LABELS . forEach ( ( expression , idx ) => {
this [ expression ] = probabilities [ idx ] ;
} ) ;
}
asSortedArray ( ) {
return FACE _EXPRESSION _LABELS . map ( ( expression ) => ( { expression , probability : this [ expression ] } ) ) . sort ( ( e0 , e1 ) => e1 . probability - e0 . probability ) ;
}
}
// build/src/faceExpressionNet/FaceExpressionNet.js
import {
softmax ,
tidy as tidy12 ,
unstack
} from "@tensorflow/tfjs" ;
class FaceExpressionNet extends FaceProcessor {
constructor ( faceFeatureExtractor = new FaceFeatureExtractor ( ) ) {
super ( "FaceExpressionNet" , faceFeatureExtractor ) ;
}
forwardInput ( input ) {
return tidy12 ( ( ) => softmax ( this . runNet ( input ) ) ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
async predictExpressions ( input ) {
const netInput = await toNetInput ( input ) ;
const out = await this . forwardInput ( netInput ) ;
const probabilitesByBatch = await Promise . all ( unstack ( out ) . map ( async ( t ) => {
const data = await t . data ( ) ;
t . dispose ( ) ;
return data ;
} ) ) ;
out . dispose ( ) ;
const predictionsByBatch = probabilitesByBatch . map ( ( probabilites ) => new FaceExpressions ( probabilites ) ) ;
return netInput . isBatchInput ? predictionsByBatch : predictionsByBatch [ 0 ] ;
}
getDefaultModelName ( ) {
return "face_expression_model" ;
}
getClassifierChannelsIn ( ) {
return 256 ;
}
getClassifierChannelsOut ( ) {
return 7 ;
}
}
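// Illustrative usage sketch (assumes the model weights have already been loaded and that
// `imageEl` is a hypothetical HTMLImageElement or other input accepted by toNetInput):
//   const net = new FaceExpressionNet();
//   const expressions = await net.predictExpressions(imageEl);
//   console.log(expressions.asSortedArray());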
// build/src/faceExpressionNet/index.js
// build/src/factories/WithFaceExpressions.js
function isWithFaceExpressions ( obj ) {
return obj [ "expressions" ] instanceof FaceExpressions ;
}
function extendWithFaceExpressions ( sourceObj , expressions ) {
const extension = { expressions } ;
return Object . assign ( { } , sourceObj , extension ) ;
}
// build/src/draw/drawFaceExpressions.js
function drawFaceExpressions ( canvasArg , faceExpressions , minConfidence = 0.1 , textFieldAnchor ) {
const faceExpressionsArray = Array . isArray ( faceExpressions ) ? faceExpressions : [ faceExpressions ] ;
faceExpressionsArray . forEach ( ( e ) => {
const expr = e instanceof FaceExpressions ? e : isWithFaceExpressions ( e ) ? e . expressions : void 0 ;
if ( ! expr ) {
throw new Error ( "drawFaceExpressions - expected faceExpressions to be FaceExpressions | WithFaceExpressions<{}> or array thereof" ) ;
}
const sorted = expr . asSortedArray ( ) ;
const resultsToDisplay = sorted . filter ( ( expr2 ) => expr2 . probability > minConfidence ) ;
const anchor = isWithFaceDetection ( e ) ? e . detection . box . bottomLeft : textFieldAnchor || new Point ( 0 , 0 ) ;
const drawTextField = new DrawTextField ( resultsToDisplay . map ( ( expr2 ) => ` ${ expr2 . expression } ( ${ round ( expr2 . probability ) } ) ` ) , anchor ) ;
drawTextField . draw ( canvasArg ) ;
} ) ;
}
// build/src/factories/WithFaceLandmarks.js
function isWithFaceLandmarks ( obj ) {
return isWithFaceDetection ( obj ) && obj [ "landmarks" ] instanceof FaceLandmarks && obj [ "unshiftedLandmarks" ] instanceof FaceLandmarks && obj [ "alignedRect" ] instanceof FaceDetection ;
}
function extendWithFaceLandmarks ( sourceObj , unshiftedLandmarks ) {
const { box : shift } = sourceObj . detection ;
const landmarks = unshiftedLandmarks . shiftBy ( shift . x , shift . y ) ;
const rect = landmarks . align ( ) ;
const { imageDims } = sourceObj . detection ;
const alignedRect = new FaceDetection ( sourceObj . detection . score , rect . rescale ( imageDims . reverse ( ) ) , imageDims ) ;
const extension = {
landmarks ,
unshiftedLandmarks ,
alignedRect
} ;
return Object . assign ( { } , sourceObj , extension ) ;
}
// build/src/draw/DrawFaceLandmarks.js
class DrawFaceLandmarksOptions {
constructor ( options = { } ) {
const { drawLines = true , drawPoints = true , lineWidth , lineColor , pointSize , pointColor } = options ;
this . drawLines = drawLines ;
this . drawPoints = drawPoints ;
this . lineWidth = lineWidth || 1 ;
this . pointSize = pointSize || 2 ;
this . lineColor = lineColor || "rgba(0, 255, 255, 1)" ;
this . pointColor = pointColor || "rgba(255, 0, 255, 1)" ;
}
}
class DrawFaceLandmarks {
constructor ( faceLandmarks , options = { } ) {
this . faceLandmarks = faceLandmarks ;
this . options = new DrawFaceLandmarksOptions ( options ) ;
}
draw ( canvasArg ) {
const ctx = getContext2dOrThrow ( canvasArg ) ;
const { drawLines , drawPoints , lineWidth , lineColor , pointSize , pointColor } = this . options ;
if ( drawLines && this . faceLandmarks instanceof FaceLandmarks68 ) {
ctx . strokeStyle = lineColor ;
ctx . lineWidth = lineWidth ;
drawContour ( ctx , this . faceLandmarks . getJawOutline ( ) ) ;
drawContour ( ctx , this . faceLandmarks . getLeftEyeBrow ( ) ) ;
drawContour ( ctx , this . faceLandmarks . getRightEyeBrow ( ) ) ;
drawContour ( ctx , this . faceLandmarks . getNose ( ) ) ;
drawContour ( ctx , this . faceLandmarks . getLeftEye ( ) , true ) ;
drawContour ( ctx , this . faceLandmarks . getRightEye ( ) , true ) ;
drawContour ( ctx , this . faceLandmarks . getMouth ( ) , true ) ;
}
if ( drawPoints ) {
ctx . strokeStyle = pointColor ;
ctx . fillStyle = pointColor ;
const drawPoint = ( pt ) => {
ctx . beginPath ( ) ;
ctx . arc ( pt . x , pt . y , pointSize , 0 , 2 * Math . PI ) ;
ctx . fill ( ) ;
} ;
this . faceLandmarks . positions . forEach ( drawPoint ) ;
}
}
}
function drawFaceLandmarks ( canvasArg , faceLandmarks ) {
const faceLandmarksArray = Array . isArray ( faceLandmarks ) ? faceLandmarks : [ faceLandmarks ] ;
faceLandmarksArray . forEach ( ( f ) => {
const landmarks = f instanceof FaceLandmarks ? f : isWithFaceLandmarks ( f ) ? f . landmarks : void 0 ;
if ( ! landmarks ) {
throw new Error ( "drawFaceLandmarks - expected faceLandmarks to be FaceLandmarks | WithFaceLandmarks<WithFaceDetection<{}>> or array thereof" ) ;
}
new DrawFaceLandmarks ( landmarks ) . draw ( canvasArg ) ;
} ) ;
}
// build/src/draw/index.js
const draw _exports = { } ;
_ _export ( draw _exports , {
AnchorPosition : ( ) => AnchorPosition ,
DrawBox : ( ) => DrawBox ,
DrawBoxOptions : ( ) => DrawBoxOptions ,
DrawFaceLandmarks : ( ) => DrawFaceLandmarks ,
DrawFaceLandmarksOptions : ( ) => DrawFaceLandmarksOptions ,
DrawTextField : ( ) => DrawTextField ,
DrawTextFieldOptions : ( ) => DrawTextFieldOptions ,
drawContour : ( ) => drawContour ,
drawDetections : ( ) => drawDetections ,
drawFaceExpressions : ( ) => drawFaceExpressions ,
drawFaceLandmarks : ( ) => drawFaceLandmarks
} ) ;
// build/src/xception/extractParams.js
function extractorsFactory3 ( extractWeights , paramMappings ) {
const extractConvParams = extractConvParamsFactory ( extractWeights , paramMappings ) ;
const extractSeparableConvParams = extractSeparableConvParamsFactory ( extractWeights , paramMappings ) ;
function extractReductionBlockParams ( channelsIn , channelsOut , mappedPrefix ) {
const separable _conv0 = extractSeparableConvParams ( channelsIn , channelsOut , ` ${ mappedPrefix } /separable_conv0 ` ) ;
const separable _conv1 = extractSeparableConvParams ( channelsOut , channelsOut , ` ${ mappedPrefix } /separable_conv1 ` ) ;
const expansion _conv = extractConvParams ( channelsIn , channelsOut , 1 , ` ${ mappedPrefix } /expansion_conv ` ) ;
return { separable _conv0 , separable _conv1 , expansion _conv } ;
}
function extractMainBlockParams ( channels , mappedPrefix ) {
const separable _conv0 = extractSeparableConvParams ( channels , channels , ` ${ mappedPrefix } /separable_conv0 ` ) ;
const separable _conv1 = extractSeparableConvParams ( channels , channels , ` ${ mappedPrefix } /separable_conv1 ` ) ;
const separable _conv2 = extractSeparableConvParams ( channels , channels , ` ${ mappedPrefix } /separable_conv2 ` ) ;
return { separable _conv0 , separable _conv1 , separable _conv2 } ;
}
return {
extractConvParams ,
extractSeparableConvParams ,
extractReductionBlockParams ,
extractMainBlockParams
} ;
}
function extractParams5 ( weights , numMainBlocks ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const { extractConvParams , extractSeparableConvParams , extractReductionBlockParams , extractMainBlockParams } = extractorsFactory3 ( extractWeights , paramMappings ) ;
const entry _flow _conv _in = extractConvParams ( 3 , 32 , 3 , "entry_flow/conv_in" ) ;
const entry _flow _reduction _block _0 = extractReductionBlockParams ( 32 , 64 , "entry_flow/reduction_block_0" ) ;
const entry _flow _reduction _block _1 = extractReductionBlockParams ( 64 , 128 , "entry_flow/reduction_block_1" ) ;
const entry _flow = {
conv _in : entry _flow _conv _in ,
reduction _block _0 : entry _flow _reduction _block _0 ,
reduction _block _1 : entry _flow _reduction _block _1
} ;
const middle _flow = { } ;
range ( numMainBlocks , 0 , 1 ) . forEach ( ( idx ) => {
middle _flow [ ` main_block_ ${ idx } ` ] = extractMainBlockParams ( 128 , ` middle_flow/main_block_ ${ idx } ` ) ;
} ) ;
const exit _flow _reduction _block = extractReductionBlockParams ( 128 , 256 , "exit_flow/reduction_block" ) ;
const exit _flow _separable _conv = extractSeparableConvParams ( 256 , 512 , "exit_flow/separable_conv" ) ;
const exit _flow = {
reduction _block : exit _flow _reduction _block ,
separable _conv : exit _flow _separable _conv
} ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
paramMappings ,
params : { entry _flow , middle _flow , exit _flow }
} ;
}
// build/src/xception/extractParamsFromWeigthMap.js
function loadParamsFactory3 ( weightMap , paramMappings ) {
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
const extractConvParams = loadConvParamsFactory ( extractWeightEntry ) ;
const extractSeparableConvParams = loadSeparableConvParamsFactory ( extractWeightEntry ) ;
function extractReductionBlockParams ( mappedPrefix ) {
const separable _conv0 = extractSeparableConvParams ( ` ${ mappedPrefix } /separable_conv0 ` ) ;
const separable _conv1 = extractSeparableConvParams ( ` ${ mappedPrefix } /separable_conv1 ` ) ;
const expansion _conv = extractConvParams ( ` ${ mappedPrefix } /expansion_conv ` ) ;
return { separable _conv0 , separable _conv1 , expansion _conv } ;
}
function extractMainBlockParams ( mappedPrefix ) {
const separable _conv0 = extractSeparableConvParams ( ` ${ mappedPrefix } /separable_conv0 ` ) ;
const separable _conv1 = extractSeparableConvParams ( ` ${ mappedPrefix } /separable_conv1 ` ) ;
const separable _conv2 = extractSeparableConvParams ( ` ${ mappedPrefix } /separable_conv2 ` ) ;
return { separable _conv0 , separable _conv1 , separable _conv2 } ;
}
return {
extractConvParams ,
extractSeparableConvParams ,
extractReductionBlockParams ,
extractMainBlockParams
} ;
}
function extractParamsFromWeigthMap5 ( weightMap , numMainBlocks ) {
const paramMappings = [ ] ;
const { extractConvParams , extractSeparableConvParams , extractReductionBlockParams , extractMainBlockParams } = loadParamsFactory3 ( weightMap , paramMappings ) ;
const entry _flow _conv _in = extractConvParams ( "entry_flow/conv_in" ) ;
const entry _flow _reduction _block _0 = extractReductionBlockParams ( "entry_flow/reduction_block_0" ) ;
const entry _flow _reduction _block _1 = extractReductionBlockParams ( "entry_flow/reduction_block_1" ) ;
const entry _flow = {
conv _in : entry _flow _conv _in ,
reduction _block _0 : entry _flow _reduction _block _0 ,
reduction _block _1 : entry _flow _reduction _block _1
} ;
const middle _flow = { } ;
range ( numMainBlocks , 0 , 1 ) . forEach ( ( idx ) => {
middle _flow [ ` main_block_ ${ idx } ` ] = extractMainBlockParams ( ` middle_flow/main_block_ ${ idx } ` ) ;
} ) ;
const exit _flow _reduction _block = extractReductionBlockParams ( "exit_flow/reduction_block" ) ;
const exit _flow _separable _conv = extractSeparableConvParams ( "exit_flow/separable_conv" ) ;
const exit _flow = {
reduction _block : exit _flow _reduction _block ,
separable _conv : exit _flow _separable _conv
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params : { entry _flow , middle _flow , exit _flow } , paramMappings } ;
}
// build/src/xception/TinyXception.js
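// TinyXception: reduced Xception-style backbone with an entry flow (strided conv plus two
// reduction blocks), a configurable number of middle-flow main blocks and an exit flow
// (reduction block plus a final separable convolution).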
import {
add as add5 ,
conv2d as conv2d3 ,
maxPool ,
relu as relu3 ,
scalar as scalar2 ,
tidy as tidy13
} from "@tensorflow/tfjs" ;
function conv ( x , params , stride ) {
return add5 ( conv2d3 ( x , params . filters , stride , "same" ) , params . bias ) ;
}
function reductionBlock ( x , params , isActivateInput = true ) {
let out = isActivateInput ? relu3 ( x ) : x ;
out = depthwiseSeparableConv ( out , params . separable _conv0 , [ 1 , 1 ] ) ;
out = depthwiseSeparableConv ( relu3 ( out ) , params . separable _conv1 , [ 1 , 1 ] ) ;
out = maxPool ( out , [ 3 , 3 ] , [ 2 , 2 ] , "same" ) ;
out = add5 ( out , conv ( x , params . expansion _conv , [ 2 , 2 ] ) ) ;
return out ;
}
function mainBlock ( x , params ) {
let out = depthwiseSeparableConv ( relu3 ( x ) , params . separable _conv0 , [ 1 , 1 ] ) ;
out = depthwiseSeparableConv ( relu3 ( out ) , params . separable _conv1 , [ 1 , 1 ] ) ;
out = depthwiseSeparableConv ( relu3 ( out ) , params . separable _conv2 , [ 1 , 1 ] ) ;
out = add5 ( out , x ) ;
return out ;
}
class TinyXception extends NeuralNetwork {
constructor ( numMainBlocks ) {
super ( "TinyXception" ) ;
this . _numMainBlocks = numMainBlocks ;
}
forwardInput ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "TinyXception - load model before inference" ) ;
}
return tidy13 ( ( ) => {
const batchTensor = input . toBatchTensor ( 112 , true ) ;
const meanRgb = [ 122.782 , 117.001 , 104.298 ] ;
const normalized = normalize ( batchTensor , meanRgb ) . div ( scalar2 ( 256 ) ) ;
let out = relu3 ( conv ( normalized , params . entry _flow . conv _in , [ 2 , 2 ] ) ) ;
out = reductionBlock ( out , params . entry _flow . reduction _block _0 , false ) ;
out = reductionBlock ( out , params . entry _flow . reduction _block _1 ) ;
range ( this . _numMainBlocks , 0 , 1 ) . forEach ( ( idx ) => {
out = mainBlock ( out , params . middle _flow [ ` main_block_ ${ idx } ` ] ) ;
} ) ;
out = reductionBlock ( out , params . exit _flow . reduction _block ) ;
out = relu3 ( depthwiseSeparableConv ( out , params . exit _flow . separable _conv , [ 1 , 1 ] ) ) ;
return out ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
getDefaultModelName ( ) {
return "tiny_xception_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMap5 ( weightMap , this . _numMainBlocks ) ;
}
extractParams ( weights ) {
return extractParams5 ( weights , this . _numMainBlocks ) ;
}
}
// build/src/ageGenderNet/extractParams.js
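// extractParams7: two fully connected heads on top of the 512-channel backbone output,
// one for age (512 -> 1) and one for gender (512 -> 2).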
function extractParams7 ( weights ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const extractFCParams = extractFCParamsFactory ( extractWeights , paramMappings ) ;
const age = extractFCParams ( 512 , 1 , "fc/age" ) ;
const gender = extractFCParams ( 512 , 2 , "fc/gender" ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
paramMappings ,
params : { fc : { age , gender } }
} ;
}
// build/src/ageGenderNet/extractParamsFromWeigthMap.js
function extractParamsFromWeigthMap7 ( weightMap ) {
const paramMappings = [ ] ;
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
function extractFcParams ( prefix ) {
const weights = extractWeightEntry ( ` ${ prefix } /weights ` , 2 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /bias ` , 1 ) ;
return { weights , bias } ;
}
const params = {
fc : {
age : extractFcParams ( "fc/age" ) ,
gender : extractFcParams ( "fc/gender" )
}
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/ageGenderNet/types.js
var Gender ;
( function ( Gender2 ) {
Gender2 [ "FEMALE" ] = "female" ;
Gender2 [ "MALE" ] = "male" ;
} ) ( Gender || ( Gender = { } ) ) ;
// build/src/ageGenderNet/AgeGenderNet.js
import {
avgPool as avgPool2 ,
softmax as softmax2 ,
tidy as tidy14 ,
unstack as unstack2
} from "@tensorflow/tfjs" ;
class AgeGenderNet extends NeuralNetwork {
constructor ( faceFeatureExtractor = new TinyXception ( 2 ) ) {
super ( "AgeGenderNet" ) ;
this . _faceFeatureExtractor = faceFeatureExtractor ;
}
get faceFeatureExtractor ( ) {
return this . _faceFeatureExtractor ;
}
runNet ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( ` ${ this . _name } - load model before inference ` ) ;
}
return tidy14 ( ( ) => {
const bottleneckFeatures = input instanceof NetInput ? this . faceFeatureExtractor . forwardInput ( input ) : input ;
const pooled = avgPool2 ( bottleneckFeatures , [ 7 , 7 ] , [ 2 , 2 ] , "valid" ) . as2D ( bottleneckFeatures . shape [ 0 ] , - 1 ) ;
const age = fullyConnectedLayer ( pooled , params . fc . age ) . as1D ( ) ;
const gender = fullyConnectedLayer ( pooled , params . fc . gender ) ;
return { age , gender } ;
} ) ;
}
forwardInput ( input ) {
return tidy14 ( ( ) => {
const { age , gender } = this . runNet ( input ) ;
return { age , gender : softmax2 ( gender ) } ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
async predictAgeAndGender ( input ) {
const netInput = await toNetInput ( input ) ;
const out = await this . forwardInput ( netInput ) ;
const ages = unstack2 ( out . age ) ;
const genders = unstack2 ( out . gender ) ;
const ageAndGenderTensors = ages . map ( ( ageTensor , i ) => ( {
ageTensor ,
genderTensor : genders [ i ]
} ) ) ;
const predictionsByBatch = await Promise . all ( ageAndGenderTensors . map ( async ( { ageTensor , genderTensor } ) => {
const age = ( await ageTensor . data ( ) ) [ 0 ] ;
const probMale = ( await genderTensor . data ( ) ) [ 0 ] ;
const isMale = probMale > 0.5 ;
const gender = isMale ? Gender . MALE : Gender . FEMALE ;
const genderProbability = isMale ? probMale : 1 - probMale ;
ageTensor . dispose ( ) ;
genderTensor . dispose ( ) ;
return { age , gender , genderProbability } ;
} ) ) ;
out . age . dispose ( ) ;
out . gender . dispose ( ) ;
return netInput . isBatchInput ? predictionsByBatch : predictionsByBatch [ 0 ] ;
}
getDefaultModelName ( ) {
return "age_gender_model" ;
}
dispose ( throwOnRedispose = true ) {
this . faceFeatureExtractor . dispose ( throwOnRedispose ) ;
super . dispose ( throwOnRedispose ) ;
}
loadClassifierParams ( weights ) {
const { params , paramMappings } = this . extractClassifierParams ( weights ) ;
this . _params = params ;
this . _paramMappings = paramMappings ;
}
extractClassifierParams ( weights ) {
return extractParams7 ( weights ) ;
}
extractParamsFromWeigthMap ( weightMap ) {
const { featureExtractorMap , classifierMap } = seperateWeightMaps ( weightMap ) ;
this . faceFeatureExtractor . loadFromWeightMap ( featureExtractorMap ) ;
return extractParamsFromWeigthMap7 ( classifierMap ) ;
}
extractParams ( weights ) {
const classifierWeightSize = 512 * 1 + 1 + ( 512 * 2 + 2 ) ;
const featureExtractorWeights = weights . slice ( 0 , weights . length - classifierWeightSize ) ;
const classifierWeights = weights . slice ( weights . length - classifierWeightSize ) ;
this . faceFeatureExtractor . extractWeights ( featureExtractorWeights ) ;
return this . extractClassifierParams ( classifierWeights ) ;
}
}
// build/src/ageGenderNet/index.js
// build/src/faceLandmarkNet/FaceLandmark68NetBase.js
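// FaceLandmark68NetBase: postProcess maps the 136 regressed values back to coordinates relative
// to the original input by undoing the square padding and rescaling applied when the image was
// resized to the network input size.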
import {
fill as fill3 ,
stack as stack2 ,
tidy as tidy15 ,
unstack as unstack3
} from "@tensorflow/tfjs" ;
class FaceLandmark68NetBase extends FaceProcessor {
postProcess ( output , inputSize , originalDimensions ) {
const inputDimensions = originalDimensions . map ( ( { width , height } ) => {
const scale2 = inputSize / Math . max ( height , width ) ;
return {
width : width * scale2 ,
height : height * scale2
} ;
} ) ;
const batchSize = inputDimensions . length ;
return tidy15 ( ( ) => {
const createInterleavedTensor = ( fillX , fillY ) => stack2 ( [
fill3 ( [ 68 ] , fillX ) ,
fill3 ( [ 68 ] , fillY )
] , 1 ) . as2D ( 1 , 136 ) . as1D ( ) ;
const getPadding = ( batchIdx , cond ) => {
const { width , height } = inputDimensions [ batchIdx ] ;
return cond ( width , height ) ? Math . abs ( width - height ) / 2 : 0 ;
} ;
const getPaddingX = ( batchIdx ) => getPadding ( batchIdx , ( w , h ) => w < h ) ;
const getPaddingY = ( batchIdx ) => getPadding ( batchIdx , ( w , h ) => h < w ) ;
const landmarkTensors = output . mul ( fill3 ( [ batchSize , 136 ] , inputSize ) ) . sub ( stack2 ( Array . from ( Array ( batchSize ) , ( _ , batchIdx ) => createInterleavedTensor ( getPaddingX ( batchIdx ) , getPaddingY ( batchIdx ) ) ) ) ) . div ( stack2 ( Array . from ( Array ( batchSize ) , ( _ , batchIdx ) => createInterleavedTensor ( inputDimensions [ batchIdx ] . width , inputDimensions [ batchIdx ] . height ) ) ) ) ;
return landmarkTensors ;
} ) ;
}
forwardInput ( input ) {
return tidy15 ( ( ) => {
const out = this . runNet ( input ) ;
return this . postProcess ( out , input . inputSize , input . inputDimensions . map ( ( [ height , width ] ) => ( { height , width } ) ) ) ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
async detectLandmarks ( input ) {
const netInput = await toNetInput ( input ) ;
const landmarkTensors = tidy15 ( ( ) => unstack3 ( this . forwardInput ( netInput ) ) ) ;
const landmarksForBatch = await Promise . all ( landmarkTensors . map ( async ( landmarkTensor , batchIdx ) => {
const landmarksArray = Array . from ( await landmarkTensor . data ( ) ) ;
const xCoords = landmarksArray . filter ( ( _ , i ) => isEven ( i ) ) ;
const yCoords = landmarksArray . filter ( ( _ , i ) => ! isEven ( i ) ) ;
return new FaceLandmarks68 ( Array ( 68 ) . fill ( 0 ) . map ( ( _ , i ) => new Point ( xCoords [ i ] , yCoords [ i ] ) ) , {
height : netInput . getInputHeight ( batchIdx ) ,
width : netInput . getInputWidth ( batchIdx )
} ) ;
} ) ) ;
landmarkTensors . forEach ( ( t ) => t . dispose ( ) ) ;
return netInput . isBatchInput ? landmarksForBatch : landmarksForBatch [ 0 ] ;
}
getClassifierChannelsOut ( ) {
return 136 ;
}
}
// build/src/faceLandmarkNet/FaceLandmark68Net.js
class FaceLandmark68Net extends FaceLandmark68NetBase {
constructor ( faceFeatureExtractor = new FaceFeatureExtractor ( ) ) {
super ( "FaceLandmark68Net" , faceFeatureExtractor ) ;
}
getDefaultModelName ( ) {
return "face_landmark_68_model" ;
}
getClassifierChannelsIn ( ) {
return 256 ;
}
}
// build/src/faceFeatureExtractor/extractParamsFromWeigthMapTiny.js
function extractParamsFromWeigthMapTiny ( weightMap ) {
const paramMappings = [ ] ;
const { extractDenseBlock3Params } = loadParamsFactory ( weightMap , paramMappings ) ;
const params = {
dense0 : extractDenseBlock3Params ( "dense0" , true ) ,
dense1 : extractDenseBlock3Params ( "dense1" ) ,
dense2 : extractDenseBlock3Params ( "dense2" )
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/faceFeatureExtractor/extractParamsTiny.js
function extractParamsTiny ( weights ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const { extractDenseBlock3Params } = extractorsFactory ( extractWeights , paramMappings ) ;
const dense0 = extractDenseBlock3Params ( 3 , 32 , "dense0" , true ) ;
const dense1 = extractDenseBlock3Params ( 32 , 64 , "dense1" ) ;
const dense2 = extractDenseBlock3Params ( 64 , 128 , "dense2" ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
paramMappings ,
params : { dense0 , dense1 , dense2 }
} ;
}
// build/src/faceFeatureExtractor/TinyFaceFeatureExtractor.js
import {
avgPool as avgPool3 ,
scalar as scalar3 ,
tidy as tidy16
} from "@tensorflow/tfjs" ;
class TinyFaceFeatureExtractor extends NeuralNetwork {
constructor ( ) {
super ( "TinyFaceFeatureExtractor" ) ;
}
forwardInput ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "TinyFaceFeatureExtractor - load model before inference" ) ;
}
return tidy16 ( ( ) => {
const batchTensor = input . toBatchTensor ( 112 , true ) ;
const meanRgb = [ 122.782 , 117.001 , 104.298 ] ;
const normalized = normalize ( batchTensor , meanRgb ) . div ( scalar3 ( 255 ) ) ;
let out = denseBlock3 ( normalized , params . dense0 , true ) ;
out = denseBlock3 ( out , params . dense1 ) ;
out = denseBlock3 ( out , params . dense2 ) ;
out = avgPool3 ( out , [ 14 , 14 ] , [ 2 , 2 ] , "valid" ) ;
return out ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
getDefaultModelName ( ) {
return "face_feature_extractor_tiny_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMapTiny ( weightMap ) ;
}
extractParams ( weights ) {
return extractParamsTiny ( weights ) ;
}
}
// build/src/faceLandmarkNet/FaceLandmark68TinyNet.js
class FaceLandmark68TinyNet extends FaceLandmark68NetBase {
constructor ( faceFeatureExtractor = new TinyFaceFeatureExtractor ( ) ) {
super ( "FaceLandmark68TinyNet" , faceFeatureExtractor ) ;
}
getDefaultModelName ( ) {
return "face_landmark_68_tiny_model" ;
}
getClassifierChannelsIn ( ) {
return 128 ;
}
}
// build/src/faceLandmarkNet/index.js
class FaceLandmarkNet extends FaceLandmark68Net {
}
// build/src/faceRecognitionNet/scaleLayer.js
import {
add as add6 ,
mul
} from "@tensorflow/tfjs" ;
function scale ( x , params ) {
return add6 ( mul ( x , params . weights ) , params . biases ) ;
}
// build/src/faceRecognitionNet/convLayer.js
import {
add as add7 ,
conv2d as conv2d4 ,
relu as relu4
} from "@tensorflow/tfjs" ;
function convLayer2 ( x , params , strides , withRelu , padding = "same" ) {
const { filters , bias } = params . conv ;
let out = conv2d4 ( x , filters , strides , padding ) ;
out = add7 ( out , bias ) ;
out = scale ( out , params . scale ) ;
return withRelu ? relu4 ( out ) : out ;
}
function conv2 ( x , params ) {
return convLayer2 ( x , params , [ 1 , 1 ] , true ) ;
}
function convNoRelu ( x , params ) {
return convLayer2 ( x , params , [ 1 , 1 ] , false ) ;
}
function convDown ( x , params ) {
return convLayer2 ( x , params , [ 2 , 2 ] , true , "valid" ) ;
}
// build/src/faceRecognitionNet/extractParams.js
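// extractParams9: reads the convolutional and residual layer weights (filters stored as
// [numFilters, depth, h, w] and transposed to [h, w, depth, numFilters]) plus the final
// 256 -> 128 fully connected projection of the recognition net.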
import {
tensor1d as tensor1d4 ,
tensor2d as tensor2d2 ,
tensor4d as tensor4d3 ,
tidy as tidy17 ,
transpose
} from "@tensorflow/tfjs" ;
function extractorsFactory5 ( extractWeights , paramMappings ) {
function extractFilterValues ( numFilterValues , numFilters , filterSize ) {
const weights = extractWeights ( numFilterValues ) ;
const depth = weights . length / ( numFilters * filterSize * filterSize ) ;
if ( isFloat ( depth ) ) {
throw new Error ( ` depth has to be an integer: ${ depth } , weights.length: ${ weights . length } , numFilters: ${ numFilters } , filterSize: ${ filterSize } ` ) ;
}
return tidy17 ( ( ) => transpose ( tensor4d3 ( weights , [ numFilters , depth , filterSize , filterSize ] ) , [ 2 , 3 , 1 , 0 ] ) ) ;
}
function extractConvParams ( numFilterValues , numFilters , filterSize , mappedPrefix ) {
const filters = extractFilterValues ( numFilterValues , numFilters , filterSize ) ;
const bias = tensor1d4 ( extractWeights ( numFilters ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /filters ` } , { paramPath : ` ${ mappedPrefix } /bias ` } ) ;
return { filters , bias } ;
}
function extractScaleLayerParams ( numWeights , mappedPrefix ) {
const weights = tensor1d4 ( extractWeights ( numWeights ) ) ;
const biases = tensor1d4 ( extractWeights ( numWeights ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /weights ` } , { paramPath : ` ${ mappedPrefix } /biases ` } ) ;
return {
weights ,
biases
} ;
}
function extractConvLayerParams ( numFilterValues , numFilters , filterSize , mappedPrefix ) {
const conv3 = extractConvParams ( numFilterValues , numFilters , filterSize , ` ${ mappedPrefix } /conv ` ) ;
const scale2 = extractScaleLayerParams ( numFilters , ` ${ mappedPrefix } /scale ` ) ;
return { conv : conv3 , scale : scale2 } ;
}
function extractResidualLayerParams ( numFilterValues , numFilters , filterSize , mappedPrefix , isDown = false ) {
const conv1 = extractConvLayerParams ( ( isDown ? 0.5 : 1 ) * numFilterValues , numFilters , filterSize , ` ${ mappedPrefix } /conv1 ` ) ;
const conv22 = extractConvLayerParams ( numFilterValues , numFilters , filterSize , ` ${ mappedPrefix } /conv2 ` ) ;
return { conv1 , conv2 : conv22 } ;
}
return {
extractConvLayerParams ,
extractResidualLayerParams
} ;
}
function extractParams9 ( weights ) {
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const paramMappings = [ ] ;
const { extractConvLayerParams , extractResidualLayerParams } = extractorsFactory5 ( extractWeights , paramMappings ) ;
const conv32 _down = extractConvLayerParams ( 4704 , 32 , 7 , "conv32_down" ) ;
const conv32 _1 = extractResidualLayerParams ( 9216 , 32 , 3 , "conv32_1" ) ;
const conv32 _2 = extractResidualLayerParams ( 9216 , 32 , 3 , "conv32_2" ) ;
const conv32 _3 = extractResidualLayerParams ( 9216 , 32 , 3 , "conv32_3" ) ;
const conv64 _down = extractResidualLayerParams ( 36864 , 64 , 3 , "conv64_down" , true ) ;
const conv64 _1 = extractResidualLayerParams ( 36864 , 64 , 3 , "conv64_1" ) ;
const conv64 _2 = extractResidualLayerParams ( 36864 , 64 , 3 , "conv64_2" ) ;
const conv64 _3 = extractResidualLayerParams ( 36864 , 64 , 3 , "conv64_3" ) ;
const conv128 _down = extractResidualLayerParams ( 147456 , 128 , 3 , "conv128_down" , true ) ;
const conv128 _1 = extractResidualLayerParams ( 147456 , 128 , 3 , "conv128_1" ) ;
const conv128 _2 = extractResidualLayerParams ( 147456 , 128 , 3 , "conv128_2" ) ;
const conv256 _down = extractResidualLayerParams ( 589824 , 256 , 3 , "conv256_down" , true ) ;
const conv256 _1 = extractResidualLayerParams ( 589824 , 256 , 3 , "conv256_1" ) ;
const conv256 _2 = extractResidualLayerParams ( 589824 , 256 , 3 , "conv256_2" ) ;
const conv256 _down _out = extractResidualLayerParams ( 589824 , 256 , 3 , "conv256_down_out" ) ;
const fc = tidy17 ( ( ) => transpose ( tensor2d2 ( extractWeights ( 256 * 128 ) , [ 128 , 256 ] ) , [ 1 , 0 ] ) ) ;
paramMappings . push ( { paramPath : ` fc ` } ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
const params = {
conv32 _down ,
conv32 _1 ,
conv32 _2 ,
conv32 _3 ,
conv64 _down ,
conv64 _1 ,
conv64 _2 ,
conv64 _3 ,
conv128 _down ,
conv128 _1 ,
conv128 _2 ,
conv256 _down ,
conv256 _1 ,
conv256 _2 ,
conv256 _down _out ,
fc
} ;
return { params , paramMappings } ;
}
// build/src/faceRecognitionNet/extractParamsFromWeigthMap.js
function extractorsFactory6 ( weightMap , paramMappings ) {
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
function extractScaleLayerParams ( prefix ) {
const weights = extractWeightEntry ( ` ${ prefix } /scale/weights ` , 1 ) ;
const biases = extractWeightEntry ( ` ${ prefix } /scale/biases ` , 1 ) ;
return { weights , biases } ;
}
function extractConvLayerParams ( prefix ) {
const filters = extractWeightEntry ( ` ${ prefix } /conv/filters ` , 4 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /conv/bias ` , 1 ) ;
const scale2 = extractScaleLayerParams ( prefix ) ;
return { conv : { filters , bias } , scale : scale2 } ;
}
function extractResidualLayerParams ( prefix ) {
return {
conv1 : extractConvLayerParams ( ` ${ prefix } /conv1 ` ) ,
conv2 : extractConvLayerParams ( ` ${ prefix } /conv2 ` )
} ;
}
return {
extractConvLayerParams ,
extractResidualLayerParams
} ;
}
function extractParamsFromWeigthMap9 ( weightMap ) {
const paramMappings = [ ] ;
const { extractConvLayerParams , extractResidualLayerParams } = extractorsFactory6 ( weightMap , paramMappings ) ;
const conv32 _down = extractConvLayerParams ( "conv32_down" ) ;
const conv32 _1 = extractResidualLayerParams ( "conv32_1" ) ;
const conv32 _2 = extractResidualLayerParams ( "conv32_2" ) ;
const conv32 _3 = extractResidualLayerParams ( "conv32_3" ) ;
const conv64 _down = extractResidualLayerParams ( "conv64_down" ) ;
const conv64 _1 = extractResidualLayerParams ( "conv64_1" ) ;
const conv64 _2 = extractResidualLayerParams ( "conv64_2" ) ;
const conv64 _3 = extractResidualLayerParams ( "conv64_3" ) ;
const conv128 _down = extractResidualLayerParams ( "conv128_down" ) ;
const conv128 _1 = extractResidualLayerParams ( "conv128_1" ) ;
const conv128 _2 = extractResidualLayerParams ( "conv128_2" ) ;
const conv256 _down = extractResidualLayerParams ( "conv256_down" ) ;
const conv256 _1 = extractResidualLayerParams ( "conv256_1" ) ;
const conv256 _2 = extractResidualLayerParams ( "conv256_2" ) ;
const conv256 _down _out = extractResidualLayerParams ( "conv256_down_out" ) ;
const fc = weightMap [ "fc" ] ;
paramMappings . push ( { originalPath : "fc" , paramPath : "fc" } ) ;
if ( ! isTensor2D ( fc ) ) {
throw new Error ( ` expected weightMap[fc] to be a Tensor2D, instead have ${ fc } ` ) ;
}
const params = {
conv32 _down ,
conv32 _1 ,
conv32 _2 ,
conv32 _3 ,
conv64 _down ,
conv64 _1 ,
conv64 _2 ,
conv64 _3 ,
conv128 _down ,
conv128 _1 ,
conv128 _2 ,
conv256 _down ,
conv256 _1 ,
conv256 _2 ,
conv256 _down _out ,
fc
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/faceRecognitionNet/residualLayer.js
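// residual / residualDown: two-convolution residual blocks; residualDown average-pools the input
// for the shortcut, zero-pads the convolution output spatially and the shortcut along the channel
// axis where the shapes differ, then adds both branches and applies ReLU.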
import {
add as add8 ,
avgPool as avgPool4 ,
concat as concat3 ,
relu as relu5 ,
zeros
} from "@tensorflow/tfjs" ;
function residual ( x , params ) {
let out = conv2 ( x , params . conv1 ) ;
out = convNoRelu ( out , params . conv2 ) ;
out = add8 ( out , x ) ;
out = relu5 ( out ) ;
return out ;
}
function residualDown ( x , params ) {
let out = convDown ( x , params . conv1 ) ;
out = convNoRelu ( out , params . conv2 ) ;
let pooled = avgPool4 ( x , 2 , 2 , "valid" ) ;
const zeros2 = zeros ( pooled . shape ) ;
const isPad = pooled . shape [ 3 ] !== out . shape [ 3 ] ;
const isAdjustShape = pooled . shape [ 1 ] !== out . shape [ 1 ] || pooled . shape [ 2 ] !== out . shape [ 2 ] ;
if ( isAdjustShape ) {
const padShapeX = [ ... out . shape ] ;
padShapeX [ 1 ] = 1 ;
const zerosW = zeros ( padShapeX ) ;
out = concat3 ( [ out , zerosW ] , 1 ) ;
const padShapeY = [ ... out . shape ] ;
padShapeY [ 2 ] = 1 ;
const zerosH = zeros ( padShapeY ) ;
out = concat3 ( [ out , zerosH ] , 2 ) ;
}
pooled = isPad ? concat3 ( [ pooled , zeros2 ] , 3 ) : pooled ;
out = add8 ( pooled , out ) ;
out = relu5 ( out ) ;
return out ;
}
// build/src/faceRecognitionNet/FaceRecognitionNet.js
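// FaceRecognitionNet: 150x150 input, normalized and passed through the residual stack; the
// global average over the spatial dimensions is projected by the fc matrix to a 128-value
// face descriptor.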
import {
cast as cast3 ,
matMul as matMul2 ,
maxPool as maxPool2 ,
scalar as scalar4 ,
tidy as tidy18 ,
unstack as unstack4
} from "@tensorflow/tfjs" ;
class FaceRecognitionNet extends NeuralNetwork {
constructor ( ) {
super ( "FaceRecognitionNet" ) ;
}
forwardInput ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "FaceRecognitionNet - load model before inference" ) ;
}
return tidy18 ( ( ) => {
const batchTensor = cast3 ( input . toBatchTensor ( 150 , true ) , "float32" ) ;
const meanRgb = [ 122.782 , 117.001 , 104.298 ] ;
const normalized = normalize ( batchTensor , meanRgb ) . div ( scalar4 ( 256 ) ) ;
let out = convDown ( normalized , params . conv32 _down ) ;
out = maxPool2 ( out , 3 , 2 , "valid" ) ;
out = residual ( out , params . conv32 _1 ) ;
out = residual ( out , params . conv32 _2 ) ;
out = residual ( out , params . conv32 _3 ) ;
out = residualDown ( out , params . conv64 _down ) ;
out = residual ( out , params . conv64 _1 ) ;
out = residual ( out , params . conv64 _2 ) ;
out = residual ( out , params . conv64 _3 ) ;
out = residualDown ( out , params . conv128 _down ) ;
out = residual ( out , params . conv128 _1 ) ;
out = residual ( out , params . conv128 _2 ) ;
out = residualDown ( out , params . conv256 _down ) ;
out = residual ( out , params . conv256 _1 ) ;
out = residual ( out , params . conv256 _2 ) ;
out = residualDown ( out , params . conv256 _down _out ) ;
const globalAvg = out . mean ( [ 1 , 2 ] ) ;
const fullyConnected = matMul2 ( globalAvg , params . fc ) ;
return fullyConnected ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
async computeFaceDescriptor ( input ) {
const netInput = await toNetInput ( input ) ;
const faceDescriptorTensors = tidy18 ( ( ) => unstack4 ( this . forwardInput ( netInput ) ) ) ;
const faceDescriptorsForBatch = await Promise . all ( faceDescriptorTensors . map ( ( t ) => t . data ( ) ) ) ;
faceDescriptorTensors . forEach ( ( t ) => t . dispose ( ) ) ;
return netInput . isBatchInput ? faceDescriptorsForBatch : faceDescriptorsForBatch [ 0 ] ;
}
getDefaultModelName ( ) {
return "face_recognition_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMap9 ( weightMap ) ;
}
extractParams ( weights ) {
return extractParams9 ( weights ) ;
}
}
// build/src/faceRecognitionNet/index.js
function createFaceRecognitionNet ( weights ) {
const net = new FaceRecognitionNet ( ) ;
net . extractWeights ( weights ) ;
return net ;
}
// build/src/factories/WithFaceDescriptor.js
function extendWithFaceDescriptor ( sourceObj , descriptor ) {
const extension = { descriptor } ;
return Object . assign ( { } , sourceObj , extension ) ;
}
// build/src/factories/WithAge.js
function isWithAge ( obj ) {
return typeof obj [ "age" ] === "number" ;
}
function extendWithAge ( sourceObj , age ) {
const extension = { age } ;
return Object . assign ( { } , sourceObj , extension ) ;
}
// build/src/factories/WithGender.js
function isWithGender ( obj ) {
return ( obj [ "gender" ] === Gender . MALE || obj [ "gender" ] === Gender . FEMALE ) && isValidProbablitiy ( obj [ "genderProbability" ] ) ;
}
function extendWithGender ( sourceObj , gender , genderProbability ) {
const extension = { gender , genderProbability } ;
return Object . assign ( { } , sourceObj , extension ) ;
}
// build/src/factories/index.js
// build/src/ssdMobilenetv1/extractParams.js
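// extractorsFactory7: reads the MobileNetV1 backbone parameters (depthwise convolutions with
// batch norm scale/offset/mean/variance plus pointwise convolutions) and the SSD prediction
// layer convolutions from the flat weight array.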
import {
tensor1d as tensor1d5 ,
tensor3d ,
tensor4d as tensor4d4
} from "@tensorflow/tfjs" ;
function extractorsFactory7 ( extractWeights , paramMappings ) {
function extractDepthwiseConvParams ( numChannels , mappedPrefix ) {
const filters = tensor4d4 ( extractWeights ( 3 * 3 * numChannels ) , [ 3 , 3 , numChannels , 1 ] ) ;
const batch _norm _scale = tensor1d5 ( extractWeights ( numChannels ) ) ;
const batch _norm _offset = tensor1d5 ( extractWeights ( numChannels ) ) ;
const batch _norm _mean = tensor1d5 ( extractWeights ( numChannels ) ) ;
const batch _norm _variance = tensor1d5 ( extractWeights ( numChannels ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /filters ` } , { paramPath : ` ${ mappedPrefix } /batch_norm_scale ` } , { paramPath : ` ${ mappedPrefix } /batch_norm_offset ` } , { paramPath : ` ${ mappedPrefix } /batch_norm_mean ` } , { paramPath : ` ${ mappedPrefix } /batch_norm_variance ` } ) ;
return {
filters ,
batch _norm _scale ,
batch _norm _offset ,
batch _norm _mean ,
batch _norm _variance
} ;
}
function extractConvParams ( channelsIn , channelsOut , filterSize , mappedPrefix , isPointwiseConv ) {
const filters = tensor4d4 ( extractWeights ( channelsIn * channelsOut * filterSize * filterSize ) , [ filterSize , filterSize , channelsIn , channelsOut ] ) ;
const bias = tensor1d5 ( extractWeights ( channelsOut ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /filters ` } , { paramPath : ` ${ mappedPrefix } / ${ isPointwiseConv ? "batch_norm_offset" : "bias" } ` } ) ;
return { filters , bias } ;
}
function extractPointwiseConvParams ( channelsIn , channelsOut , filterSize , mappedPrefix ) {
const { filters , bias } = extractConvParams ( channelsIn , channelsOut , filterSize , mappedPrefix , true ) ;
return {
filters ,
batch _norm _offset : bias
} ;
}
function extractConvPairParams ( channelsIn , channelsOut , mappedPrefix ) {
const depthwise _conv = extractDepthwiseConvParams ( channelsIn , ` ${ mappedPrefix } /depthwise_conv ` ) ;
const pointwise _conv = extractPointwiseConvParams ( channelsIn , channelsOut , 1 , ` ${ mappedPrefix } /pointwise_conv ` ) ;
return { depthwise _conv , pointwise _conv } ;
}
function extractMobilenetV1Params ( ) {
const conv _0 = extractPointwiseConvParams ( 3 , 32 , 3 , "mobilenetv1/conv_0" ) ;
const conv _1 = extractConvPairParams ( 32 , 64 , "mobilenetv1/conv_1" ) ;
const conv _2 = extractConvPairParams ( 64 , 128 , "mobilenetv1/conv_2" ) ;
const conv _3 = extractConvPairParams ( 128 , 128 , "mobilenetv1/conv_3" ) ;
const conv _4 = extractConvPairParams ( 128 , 256 , "mobilenetv1/conv_4" ) ;
const conv _5 = extractConvPairParams ( 256 , 256 , "mobilenetv1/conv_5" ) ;
const conv _6 = extractConvPairParams ( 256 , 512 , "mobilenetv1/conv_6" ) ;
const conv _7 = extractConvPairParams ( 512 , 512 , "mobilenetv1/conv_7" ) ;
const conv _8 = extractConvPairParams ( 512 , 512 , "mobilenetv1/conv_8" ) ;
const conv _9 = extractConvPairParams ( 512 , 512 , "mobilenetv1/conv_9" ) ;
const conv _10 = extractConvPairParams ( 512 , 512 , "mobilenetv1/conv_10" ) ;
const conv _11 = extractConvPairParams ( 512 , 512 , "mobilenetv1/conv_11" ) ;
const conv _12 = extractConvPairParams ( 512 , 1024 , "mobilenetv1/conv_12" ) ;
const conv _13 = extractConvPairParams ( 1024 , 1024 , "mobilenetv1/conv_13" ) ;
return {
conv _0 ,
conv _1 ,
conv _2 ,
conv _3 ,
conv _4 ,
conv _5 ,
conv _6 ,
conv _7 ,
conv _8 ,
conv _9 ,
conv _10 ,
conv _11 ,
conv _12 ,
conv _13
} ;
}
function extractPredictionLayerParams ( ) {
const conv _0 = extractPointwiseConvParams ( 1024 , 256 , 1 , "prediction_layer/conv_0" ) ;
const conv _1 = extractPointwiseConvParams ( 256 , 512 , 3 , "prediction_layer/conv_1" ) ;
const conv _2 = extractPointwiseConvParams ( 512 , 128 , 1 , "prediction_layer/conv_2" ) ;
const conv _3 = extractPointwiseConvParams ( 128 , 256 , 3 , "prediction_layer/conv_3" ) ;
const conv _4 = extractPointwiseConvParams ( 256 , 128 , 1 , "prediction_layer/conv_4" ) ;
const conv _5 = extractPointwiseConvParams ( 128 , 256 , 3 , "prediction_layer/conv_5" ) ;
const conv _6 = extractPointwiseConvParams ( 256 , 64 , 1 , "prediction_layer/conv_6" ) ;
const conv _7 = extractPointwiseConvParams ( 64 , 128 , 3 , "prediction_layer/conv_7" ) ;
const box _encoding _0 _predictor = extractConvParams ( 512 , 12 , 1 , "prediction_layer/box_predictor_0/box_encoding_predictor" ) ;
const class _predictor _0 = extractConvParams ( 512 , 9 , 1 , "prediction_layer/box_predictor_0/class_predictor" ) ;
const box _encoding _1 _predictor = extractConvParams ( 1024 , 24 , 1 , "prediction_layer/box_predictor_1/box_encoding_predictor" ) ;
const class _predictor _1 = extractConvParams ( 1024 , 18 , 1 , "prediction_layer/box_predictor_1/class_predictor" ) ;
const box _encoding _2 _predictor = extractConvParams ( 512 , 24 , 1 , "prediction_layer/box_predictor_2/box_encoding_predictor" ) ;
const class _predictor _2 = extractConvParams ( 512 , 18 , 1 , "prediction_layer/box_predictor_2/class_predictor" ) ;
const box _encoding _3 _predictor = extractConvParams ( 256 , 24 , 1 , "prediction_layer/box_predictor_3/box_encoding_predictor" ) ;
const class _predictor _3 = extractConvParams ( 256 , 18 , 1 , "prediction_layer/box_predictor_3/class_predictor" ) ;
const box _encoding _4 _predictor = extractConvParams ( 256 , 24 , 1 , "prediction_layer/box_predictor_4/box_encoding_predictor" ) ;
const class _predictor _4 = extractConvParams ( 256 , 18 , 1 , "prediction_layer/box_predictor_4/class_predictor" ) ;
const box _encoding _5 _predictor = extractConvParams ( 128 , 24 , 1 , "prediction_layer/box_predictor_5/box_encoding_predictor" ) ;
const class _predictor _5 = extractConvParams ( 128 , 18 , 1 , "prediction_layer/box_predictor_5/class_predictor" ) ;
const box _predictor _0 = {
box _encoding _predictor : box _encoding _0 _predictor ,
class _predictor : class _predictor _0
} ;
const box _predictor _1 = {
box _encoding _predictor : box _encoding _1 _predictor ,
class _predictor : class _predictor _1
} ;
const box _predictor _2 = {
box _encoding _predictor : box _encoding _2 _predictor ,
class _predictor : class _predictor _2
} ;
const box _predictor _3 = {
box _encoding _predictor : box _encoding _3 _predictor ,
class _predictor : class _predictor _3
} ;
const box _predictor _4 = {
box _encoding _predictor : box _encoding _4 _predictor ,
class _predictor : class _predictor _4
} ;
const box _predictor _5 = {
box _encoding _predictor : box _encoding _5 _predictor ,
class _predictor : class _predictor _5
} ;
return {
conv _0 ,
conv _1 ,
conv _2 ,
conv _3 ,
conv _4 ,
conv _5 ,
conv _6 ,
conv _7 ,
box _predictor _0 ,
box _predictor _1 ,
box _predictor _2 ,
box _predictor _3 ,
box _predictor _4 ,
box _predictor _5
} ;
}
return {
extractMobilenetV1Params ,
extractPredictionLayerParams
} ;
}
function extractParams11 ( weights ) {
const paramMappings = [ ] ;
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const { extractMobilenetV1Params , extractPredictionLayerParams } = extractorsFactory7 ( extractWeights , paramMappings ) ;
const mobilenetv1 = extractMobilenetV1Params ( ) ;
const prediction _layer = extractPredictionLayerParams ( ) ;
const extra _dim = tensor3d ( extractWeights ( 5118 * 4 ) , [ 1 , 5118 , 4 ] ) ;
const output _layer = {
extra _dim
} ;
paramMappings . push ( { paramPath : "output_layer/extra_dim" } ) ;
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return {
params : {
mobilenetv1 ,
prediction _layer ,
output _layer
} ,
paramMappings
} ;
}
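// Sketch of the sequential extraction pattern used by extractParams11 above: the model
// weights arrive as one flat Float32Array and every extractor consumes a fixed number of
// values from the front. This stand-in only mirrors the idea; the real helper is the
// extractWeightsFactory defined earlier in this bundle.
function exampleFlatWeightReader(flatWeights) {
  let offset = 0;
  const extractWeights = (count) => {
    const chunk = flatWeights.slice(offset, offset + count);
    offset += count;
    return chunk;
  };
  const getRemainingWeights = () => flatWeights.slice(offset);
  return { extractWeights, getRemainingWeights };
}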
// build/src/ssdMobilenetv1/extractParamsFromWeigthMap.js
function extractorsFactory8 ( weightMap , paramMappings ) {
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
function extractPointwiseConvParams ( prefix , idx , mappedPrefix ) {
const filters = extractWeightEntry ( ` ${ prefix } /Conv2d_ ${ idx } _pointwise/weights ` , 4 , ` ${ mappedPrefix } /filters ` ) ;
const batch _norm _offset = extractWeightEntry ( ` ${ prefix } /Conv2d_ ${ idx } _pointwise/convolution_bn_offset ` , 1 , ` ${ mappedPrefix } /batch_norm_offset ` ) ;
return { filters , batch _norm _offset } ;
}
function extractConvPairParams ( idx ) {
const mappedPrefix = ` mobilenetv1/conv_ ${ idx } ` ;
const prefixDepthwiseConv = ` MobilenetV1/Conv2d_ ${ idx } _depthwise ` ;
const mappedPrefixDepthwiseConv = ` ${ mappedPrefix } /depthwise_conv ` ;
const mappedPrefixPointwiseConv = ` ${ mappedPrefix } /pointwise_conv ` ;
const filters = extractWeightEntry ( ` ${ prefixDepthwiseConv } /depthwise_weights ` , 4 , ` ${ mappedPrefixDepthwiseConv } /filters ` ) ;
const batch _norm _scale = extractWeightEntry ( ` ${ prefixDepthwiseConv } /BatchNorm/gamma ` , 1 , ` ${ mappedPrefixDepthwiseConv } /batch_norm_scale ` ) ;
const batch _norm _offset = extractWeightEntry ( ` ${ prefixDepthwiseConv } /BatchNorm/beta ` , 1 , ` ${ mappedPrefixDepthwiseConv } /batch_norm_offset ` ) ;
const batch _norm _mean = extractWeightEntry ( ` ${ prefixDepthwiseConv } /BatchNorm/moving_mean ` , 1 , ` ${ mappedPrefixDepthwiseConv } /batch_norm_mean ` ) ;
const batch _norm _variance = extractWeightEntry ( ` ${ prefixDepthwiseConv } /BatchNorm/moving_variance ` , 1 , ` ${ mappedPrefixDepthwiseConv } /batch_norm_variance ` ) ;
return {
depthwise _conv : {
filters ,
batch _norm _scale ,
batch _norm _offset ,
batch _norm _mean ,
batch _norm _variance
} ,
pointwise _conv : extractPointwiseConvParams ( "MobilenetV1" , idx , mappedPrefixPointwiseConv )
} ;
}
function extractMobilenetV1Params ( ) {
return {
conv _0 : extractPointwiseConvParams ( "MobilenetV1" , 0 , "mobilenetv1/conv_0" ) ,
conv _1 : extractConvPairParams ( 1 ) ,
conv _2 : extractConvPairParams ( 2 ) ,
conv _3 : extractConvPairParams ( 3 ) ,
conv _4 : extractConvPairParams ( 4 ) ,
conv _5 : extractConvPairParams ( 5 ) ,
conv _6 : extractConvPairParams ( 6 ) ,
conv _7 : extractConvPairParams ( 7 ) ,
conv _8 : extractConvPairParams ( 8 ) ,
conv _9 : extractConvPairParams ( 9 ) ,
conv _10 : extractConvPairParams ( 10 ) ,
conv _11 : extractConvPairParams ( 11 ) ,
conv _12 : extractConvPairParams ( 12 ) ,
conv _13 : extractConvPairParams ( 13 )
} ;
}
function extractConvParams ( prefix , mappedPrefix ) {
const filters = extractWeightEntry ( ` ${ prefix } /weights ` , 4 , ` ${ mappedPrefix } /filters ` ) ;
const bias = extractWeightEntry ( ` ${ prefix } /biases ` , 1 , ` ${ mappedPrefix } /bias ` ) ;
return { filters , bias } ;
}
function extractBoxPredictorParams ( idx ) {
const box _encoding _predictor = extractConvParams ( ` Prediction/BoxPredictor_ ${ idx } /BoxEncodingPredictor ` , ` prediction_layer/box_predictor_ ${ idx } /box_encoding_predictor ` ) ;
const class _predictor = extractConvParams ( ` Prediction/BoxPredictor_ ${ idx } /ClassPredictor ` , ` prediction_layer/box_predictor_ ${ idx } /class_predictor ` ) ;
return { box _encoding _predictor , class _predictor } ;
}
function extractPredictionLayerParams ( ) {
return {
conv _0 : extractPointwiseConvParams ( "Prediction" , 0 , "prediction_layer/conv_0" ) ,
conv _1 : extractPointwiseConvParams ( "Prediction" , 1 , "prediction_layer/conv_1" ) ,
conv _2 : extractPointwiseConvParams ( "Prediction" , 2 , "prediction_layer/conv_2" ) ,
conv _3 : extractPointwiseConvParams ( "Prediction" , 3 , "prediction_layer/conv_3" ) ,
conv _4 : extractPointwiseConvParams ( "Prediction" , 4 , "prediction_layer/conv_4" ) ,
conv _5 : extractPointwiseConvParams ( "Prediction" , 5 , "prediction_layer/conv_5" ) ,
conv _6 : extractPointwiseConvParams ( "Prediction" , 6 , "prediction_layer/conv_6" ) ,
conv _7 : extractPointwiseConvParams ( "Prediction" , 7 , "prediction_layer/conv_7" ) ,
box _predictor _0 : extractBoxPredictorParams ( 0 ) ,
box _predictor _1 : extractBoxPredictorParams ( 1 ) ,
box _predictor _2 : extractBoxPredictorParams ( 2 ) ,
box _predictor _3 : extractBoxPredictorParams ( 3 ) ,
box _predictor _4 : extractBoxPredictorParams ( 4 ) ,
box _predictor _5 : extractBoxPredictorParams ( 5 )
} ;
}
return {
extractMobilenetV1Params ,
extractPredictionLayerParams
} ;
}
function extractParamsFromWeigthMap11 ( weightMap ) {
const paramMappings = [ ] ;
const { extractMobilenetV1Params , extractPredictionLayerParams } = extractorsFactory8 ( weightMap , paramMappings ) ;
const extra _dim = weightMap [ "Output/extra_dim" ] ;
paramMappings . push ( { originalPath : "Output/extra_dim" , paramPath : "output_layer/extra_dim" } ) ;
if ( ! isTensor3D ( extra _dim ) ) {
throw new Error ( ` expected weightMap['Output/extra_dim'] to be a Tensor3D, instead have ${ extra _dim } ` ) ;
}
const params = {
mobilenetv1 : extractMobilenetV1Params ( ) ,
prediction _layer : extractPredictionLayerParams ( ) ,
output _layer : {
extra _dim
}
} ;
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/ssdMobilenetv1/pointwiseConvLayer.js
import {
add as add9 ,
clipByValue ,
conv2d as conv2d5 ,
tidy as tidy19
} from "@tensorflow/tfjs" ;
function pointwiseConvLayer ( x , params , strides ) {
return tidy19 ( ( ) => {
let out = conv2d5 ( x , params . filters , strides , "same" ) ;
out = add9 ( out , params . batch _norm _offset ) ;
return clipByValue ( out , 0 , 6 ) ;
} ) ;
}
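// Note on pointwiseConvLayer above: conv2d followed by adding batch_norm_offset and
// clipByValue(out, 0, 6) is the MobileNet "ReLU6" activation. Element-wise it behaves like
// this scalar sketch (illustration only):
const exampleRelu6 = (v) => Math.min(Math.max(v, 0), 6);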
// build/src/ssdMobilenetv1/mobileNetV1.js
import {
batchNorm ,
clipByValue as clipByValue2 ,
depthwiseConv2d ,
tidy as tidy20
} from "@tensorflow/tfjs" ;
const epsilon = 0.0010000000474974513 ;
function depthwiseConvLayer ( x , params , strides ) {
return tidy20 ( ( ) => {
let out = depthwiseConv2d ( x , params . filters , strides , "same" ) ;
out = batchNorm ( out , params . batch _norm _mean , params . batch _norm _variance , params . batch _norm _offset , params . batch _norm _scale , epsilon ) ;
return clipByValue2 ( out , 0 , 6 ) ;
} ) ;
}
function getStridesForLayerIdx ( layerIdx ) {
return [ 2 , 4 , 6 , 12 ] . some ( ( idx ) => idx === layerIdx ) ? [ 2 , 2 ] : [ 1 , 1 ] ;
}
function mobileNetV1 ( x , params ) {
return tidy20 ( ( ) => {
let conv11 ;
let out = pointwiseConvLayer ( x , params . conv _0 , [ 2 , 2 ] ) ;
const convPairParams = [
params . conv _1 ,
params . conv _2 ,
params . conv _3 ,
params . conv _4 ,
params . conv _5 ,
params . conv _6 ,
params . conv _7 ,
params . conv _8 ,
params . conv _9 ,
params . conv _10 ,
params . conv _11 ,
params . conv _12 ,
params . conv _13
] ;
convPairParams . forEach ( ( param , i ) => {
const layerIdx = i + 1 ;
const depthwiseConvStrides = getStridesForLayerIdx ( layerIdx ) ;
out = depthwiseConvLayer ( out , param . depthwise _conv , depthwiseConvStrides ) ;
out = pointwiseConvLayer ( out , param . pointwise _conv , [ 1 , 1 ] ) ;
if ( layerIdx === 11 ) {
conv11 = out ;
}
} ) ;
if ( ! conv11 ) {
throw new Error ( "mobileNetV1 - output of conv layer 11 is missing" ) ;
}
return {
out ,
conv11
} ;
} ) ;
}
// build/src/ssdMobilenetv1/nonMaxSuppression.js
function nonMaxSuppression2 ( boxes , scores , maxOutputSize , iouThreshold , scoreThreshold ) {
const numBoxes = boxes . shape [ 0 ] ;
const outputSize = Math . min ( maxOutputSize , numBoxes ) ;
const candidates = scores . map ( ( score , boxIndex ) => ( { score , boxIndex } ) ) . filter ( ( c ) => c . score > scoreThreshold ) . sort ( ( c1 , c2 ) => c2 . score - c1 . score ) ;
const suppressFunc = ( x ) => x <= iouThreshold ? 1 : 0 ;
const selected = [ ] ;
candidates . forEach ( ( c ) => {
if ( selected . length >= outputSize ) {
return ;
}
const originalScore = c . score ;
for ( let j = selected . length - 1 ; j >= 0 ; -- j ) {
const iou3 = IOU ( boxes , c . boxIndex , selected [ j ] ) ;
if ( iou3 === 0 ) {
continue ;
}
c . score *= suppressFunc ( iou3 ) ;
if ( c . score <= scoreThreshold ) {
break ;
}
}
if ( originalScore === c . score ) {
selected . push ( c . boxIndex ) ;
}
} ) ;
return selected ;
}
function IOU ( boxes , i , j ) {
const boxesData = boxes . arraySync ( ) ;
const yminI = Math . min ( boxesData [ i ] [ 0 ] , boxesData [ i ] [ 2 ] ) ;
const xminI = Math . min ( boxesData [ i ] [ 1 ] , boxesData [ i ] [ 3 ] ) ;
const ymaxI = Math . max ( boxesData [ i ] [ 0 ] , boxesData [ i ] [ 2 ] ) ;
const xmaxI = Math . max ( boxesData [ i ] [ 1 ] , boxesData [ i ] [ 3 ] ) ;
const yminJ = Math . min ( boxesData [ j ] [ 0 ] , boxesData [ j ] [ 2 ] ) ;
const xminJ = Math . min ( boxesData [ j ] [ 1 ] , boxesData [ j ] [ 3 ] ) ;
const ymaxJ = Math . max ( boxesData [ j ] [ 0 ] , boxesData [ j ] [ 2 ] ) ;
const xmaxJ = Math . max ( boxesData [ j ] [ 1 ] , boxesData [ j ] [ 3 ] ) ;
const areaI = ( ymaxI - yminI ) * ( xmaxI - xminI ) ;
const areaJ = ( ymaxJ - yminJ ) * ( xmaxJ - xminJ ) ;
if ( areaI <= 0 || areaJ <= 0 ) {
return 0 ;
}
const intersectionYmin = Math . max ( yminI , yminJ ) ;
const intersectionXmin = Math . max ( xminI , xminJ ) ;
const intersectionYmax = Math . min ( ymaxI , ymaxJ ) ;
const intersectionXmax = Math . min ( xmaxI , xmaxJ ) ;
const intersectionArea = Math . max ( intersectionYmax - intersectionYmin , 0 ) * Math . max ( intersectionXmax - intersectionXmin , 0 ) ;
return intersectionArea / ( areaI + areaJ - intersectionArea ) ;
}
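// Sketch of the same IoU computation on plain arrays: IOU above reads rows of the boxes
// tensor in [ymin, xmin, ymax, xmax] order, takes the intersection area, and divides by
// the union. A worked example is given below the helper (illustration only).
function exampleIou([yminA, xminA, ymaxA, xmaxA], [yminB, xminB, ymaxB, xmaxB]) {
  const areaA = (ymaxA - yminA) * (xmaxA - xminA);
  const areaB = (ymaxB - yminB) * (xmaxB - xminB);
  if (areaA <= 0 || areaB <= 0) return 0;
  const interH = Math.max(Math.min(ymaxA, ymaxB) - Math.max(yminA, yminB), 0);
  const interW = Math.max(Math.min(xmaxA, xmaxB) - Math.max(xminA, xminB), 0);
  const inter = interH * interW;
  return inter / (areaA + areaB - inter);
}
// exampleIou([0, 0, 2, 2], [1, 1, 3, 3]) === 1 / 7 (intersection 1, union 4 + 4 - 1 = 7).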
// build/src/ssdMobilenetv1/outputLayer.js
import {
add as add10 ,
div ,
exp ,
mul as mul2 ,
reshape ,
scalar as scalar5 ,
sigmoid as sigmoid2 ,
slice ,
stack as stack3 ,
sub as sub2 ,
tidy as tidy21 ,
tile ,
transpose as transpose2 ,
unstack as unstack5
} from "@tensorflow/tfjs" ;
function getCenterCoordinatesAndSizesLayer ( x ) {
const vec = unstack5 ( transpose2 ( x , [ 1 , 0 ] ) ) ;
const sizes = [
sub2 ( vec [ 2 ] , vec [ 0 ] ) ,
sub2 ( vec [ 3 ] , vec [ 1 ] )
] ;
const centers = [
add10 ( vec [ 0 ] , div ( sizes [ 0 ] , scalar5 ( 2 ) ) ) ,
add10 ( vec [ 1 ] , div ( sizes [ 1 ] , scalar5 ( 2 ) ) )
] ;
return {
sizes ,
centers
} ;
}
function decodeBoxesLayer ( x0 , x1 ) {
const { sizes , centers } = getCenterCoordinatesAndSizesLayer ( x0 ) ;
const vec = unstack5 ( transpose2 ( x1 , [ 1 , 0 ] ) ) ;
const div0 _out = div ( mul2 ( exp ( div ( vec [ 2 ] , scalar5 ( 5 ) ) ) , sizes [ 0 ] ) , scalar5 ( 2 ) ) ;
const add0 _out = add10 ( mul2 ( div ( vec [ 0 ] , scalar5 ( 10 ) ) , sizes [ 0 ] ) , centers [ 0 ] ) ;
const div1 _out = div ( mul2 ( exp ( div ( vec [ 3 ] , scalar5 ( 5 ) ) ) , sizes [ 1 ] ) , scalar5 ( 2 ) ) ;
const add1 _out = add10 ( mul2 ( div ( vec [ 1 ] , scalar5 ( 10 ) ) , sizes [ 1 ] ) , centers [ 1 ] ) ;
return transpose2 ( stack3 ( [
sub2 ( add0 _out , div0 _out ) ,
sub2 ( add1 _out , div1 _out ) ,
add10 ( add0 _out , div0 _out ) ,
add10 ( add1 _out , div1 _out )
] ) , [ 1 , 0 ] ) ;
}
function outputLayer ( boxPredictions , classPredictions , params ) {
return tidy21 ( ( ) => {
const batchSize = boxPredictions . shape [ 0 ] ;
let boxes = decodeBoxesLayer ( reshape ( tile ( params . extra _dim , [ batchSize , 1 , 1 ] ) , [ - 1 , 4 ] ) , reshape ( boxPredictions , [ - 1 , 4 ] ) ) ;
boxes = reshape ( boxes , [ batchSize , boxes . shape [ 0 ] / batchSize , 4 ] ) ;
const scoresAndClasses = sigmoid2 ( slice ( classPredictions , [ 0 , 0 , 1 ] , [ - 1 , - 1 , - 1 ] ) ) ;
let scores = slice ( scoresAndClasses , [ 0 , 0 , 0 ] , [ - 1 , - 1 , 1 ] ) ;
scores = reshape ( scores , [ batchSize , scores . shape [ 1 ] ] ) ;
const boxesByBatch = unstack5 ( boxes ) ;
const scoresByBatch = unstack5 ( scores ) ;
return {
boxes : boxesByBatch ,
scores : scoresByBatch
} ;
} ) ;
}
// build/src/ssdMobilenetv1/boxPredictionLayer.js
import {
reshape as reshape2 ,
tidy as tidy22
} from "@tensorflow/tfjs" ;
function boxPredictionLayer ( x , params ) {
return tidy22 ( ( ) => {
const batchSize = x . shape [ 0 ] ;
const boxPredictionEncoding = reshape2 ( convLayer ( x , params . box _encoding _predictor ) , [ batchSize , - 1 , 1 , 4 ] ) ;
const classPrediction = reshape2 ( convLayer ( x , params . class _predictor ) , [ batchSize , - 1 , 3 ] ) ;
return {
boxPredictionEncoding ,
classPrediction
} ;
} ) ;
}
// build/src/ssdMobilenetv1/predictionLayer.js
import {
concat as concat4 ,
tidy as tidy23
} from "@tensorflow/tfjs" ;
function predictionLayer ( x , conv11 , params ) {
return tidy23 ( ( ) => {
const conv0 = pointwiseConvLayer ( x , params . conv _0 , [ 1 , 1 ] ) ;
const conv1 = pointwiseConvLayer ( conv0 , params . conv _1 , [ 2 , 2 ] ) ;
const conv22 = pointwiseConvLayer ( conv1 , params . conv _2 , [ 1 , 1 ] ) ;
const conv3 = pointwiseConvLayer ( conv22 , params . conv _3 , [ 2 , 2 ] ) ;
const conv4 = pointwiseConvLayer ( conv3 , params . conv _4 , [ 1 , 1 ] ) ;
const conv5 = pointwiseConvLayer ( conv4 , params . conv _5 , [ 2 , 2 ] ) ;
const conv6 = pointwiseConvLayer ( conv5 , params . conv _6 , [ 1 , 1 ] ) ;
const conv7 = pointwiseConvLayer ( conv6 , params . conv _7 , [ 2 , 2 ] ) ;
const boxPrediction0 = boxPredictionLayer ( conv11 , params . box _predictor _0 ) ;
const boxPrediction1 = boxPredictionLayer ( x , params . box _predictor _1 ) ;
const boxPrediction2 = boxPredictionLayer ( conv1 , params . box _predictor _2 ) ;
const boxPrediction3 = boxPredictionLayer ( conv3 , params . box _predictor _3 ) ;
const boxPrediction4 = boxPredictionLayer ( conv5 , params . box _predictor _4 ) ;
const boxPrediction5 = boxPredictionLayer ( conv7 , params . box _predictor _5 ) ;
const boxPredictions = concat4 ( [
boxPrediction0 . boxPredictionEncoding ,
boxPrediction1 . boxPredictionEncoding ,
boxPrediction2 . boxPredictionEncoding ,
boxPrediction3 . boxPredictionEncoding ,
boxPrediction4 . boxPredictionEncoding ,
boxPrediction5 . boxPredictionEncoding
] , 1 ) ;
const classPredictions = concat4 ( [
boxPrediction0 . classPrediction ,
boxPrediction1 . classPrediction ,
boxPrediction2 . classPrediction ,
boxPrediction3 . classPrediction ,
boxPrediction4 . classPrediction ,
boxPrediction5 . classPrediction
] , 1 ) ;
return {
boxPredictions ,
classPredictions
} ;
} ) ;
}
// build/src/ssdMobilenetv1/SsdMobilenetv1Options.js
class SsdMobilenetv1Options {
constructor ( { minConfidence , maxResults } = { } ) {
this . _name = "SsdMobilenetv1Options" ;
this . _minConfidence = minConfidence || 0.5 ;
this . _maxResults = maxResults || 100 ;
if ( typeof this . _minConfidence !== "number" || this . _minConfidence <= 0 || this . _minConfidence >= 1 ) {
throw new Error ( ` ${ this . _name } - expected minConfidence to be a number between 0 and 1 ` ) ;
}
if ( typeof this . _maxResults !== "number" ) {
throw new Error ( ` ${ this . _name } - expected maxResults to be a number ` ) ;
}
}
get minConfidence ( ) {
return this . _minConfidence ;
}
get maxResults ( ) {
return this . _maxResults ;
}
}
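// Usage sketch for SsdMobilenetv1Options (illustration only): both fields are optional and
// fall back to minConfidence 0.5 and maxResults 100; out-of-range values throw.
const exampleSsdOptions = () => new SsdMobilenetv1Options({ minConfidence: 0.7, maxResults: 10 });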
// build/src/ssdMobilenetv1/SsdMobilenetv1.js
import {
cast as cast4 ,
mul as mul3 ,
scalar as scalar6 ,
sub as sub3 ,
tidy as tidy24
} from "@tensorflow/tfjs" ;
class SsdMobilenetv1 extends NeuralNetwork {
constructor ( ) {
super ( "SsdMobilenetv1" ) ;
}
forwardInput ( input ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "SsdMobilenetv1 - load model before inference" ) ;
}
return tidy24 ( ( ) => {
const batchTensor = cast4 ( input . toBatchTensor ( 512 , false ) , "float32" ) ;
const x = sub3 ( mul3 ( batchTensor , scalar6 ( 0.007843137718737125 ) ) , scalar6 ( 1 ) ) ;
const features = mobileNetV1 ( x , params . mobilenetv1 ) ;
const { boxPredictions , classPredictions } = predictionLayer ( features . out , features . conv11 , params . prediction _layer ) ;
return outputLayer ( boxPredictions , classPredictions , params . output _layer ) ;
} ) ;
}
async forward ( input ) {
return this . forwardInput ( await toNetInput ( input ) ) ;
}
async locateFaces ( input , options = { } ) {
const { maxResults , minConfidence } = new SsdMobilenetv1Options ( options ) ;
const netInput = await toNetInput ( input ) ;
const { boxes : _boxes , scores : _scores } = this . forwardInput ( netInput ) ;
const boxes = _boxes [ 0 ] ;
const scores = _scores [ 0 ] ;
for ( let i = 1 ; i < _boxes . length ; i ++ ) {
_boxes [ i ] . dispose ( ) ;
_scores [ i ] . dispose ( ) ;
}
const scoresData = Array . from ( await scores . data ( ) ) ;
const iouThreshold = 0.5 ;
const indices = nonMaxSuppression2 ( boxes , scoresData , maxResults , iouThreshold , minConfidence ) ;
const reshapedDims = netInput . getReshapedInputDimensions ( 0 ) ;
const inputSize = netInput . inputSize ;
const padX = inputSize / reshapedDims . width ;
const padY = inputSize / reshapedDims . height ;
const boxesData = boxes . arraySync ( ) ;
const results = indices . map ( ( idx ) => {
const [ top , bottom ] = [
Math . max ( 0 , boxesData [ idx ] [ 0 ] ) ,
Math . min ( 1 , boxesData [ idx ] [ 2 ] )
] . map ( ( val ) => val * padY ) ;
const [ left , right ] = [
Math . max ( 0 , boxesData [ idx ] [ 1 ] ) ,
Math . min ( 1 , boxesData [ idx ] [ 3 ] )
] . map ( ( val ) => val * padX ) ;
return new FaceDetection ( scoresData [ idx ] , new Rect ( left , top , right - left , bottom - top ) , {
height : netInput . getInputHeight ( 0 ) ,
width : netInput . getInputWidth ( 0 )
} ) ;
} ) ;
boxes . dispose ( ) ;
scores . dispose ( ) ;
return results ;
}
getDefaultModelName ( ) {
return "ssd_mobilenetv1_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMap11 ( weightMap ) ;
}
extractParams ( weights ) {
return extractParams11 ( weights ) ;
}
}
// build/src/ssdMobilenetv1/index.js
function createSsdMobilenetv1 ( weights ) {
const net = new SsdMobilenetv1 ( ) ;
net . extractWeights ( weights ) ;
return net ;
}
function createFaceDetectionNet ( weights ) {
return createSsdMobilenetv1 ( weights ) ;
}
class FaceDetectionNet extends SsdMobilenetv1 {
}
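// Usage sketch (illustration only): locating faces with the SSD MobileNetV1 detector.
// `modelUrl` and `imageEl` are assumed inputs; load() fetches the weights named by
// getDefaultModelName() from that location, and locateFaces resolves to FaceDetection results.
async function exampleLocateFacesWithSsd(modelUrl, imageEl) {
  const net = new SsdMobilenetv1();
  await net.load(modelUrl);
  return net.locateFaces(imageEl, { minConfidence: 0.6 });
}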
// build/src/tinyYolov2/const.js
const IOU _THRESHOLD = 0.4 ;
const BOX _ANCHORS = [
new Point ( 0.738768 , 0.874946 ) ,
new Point ( 2.42204 , 2.65704 ) ,
new Point ( 4.30971 , 7.04493 ) ,
new Point ( 10.246 , 4.59428 ) ,
new Point ( 12.6868 , 11.8741 )
] ;
const BOX _ANCHORS _SEPARABLE = [
new Point ( 1.603231 , 2.094468 ) ,
new Point ( 6.041143 , 7.080126 ) ,
new Point ( 2.882459 , 3.518061 ) ,
new Point ( 4.266906 , 5.178857 ) ,
new Point ( 9.041765 , 10.66308 )
] ;
const MEAN _RGB _SEPARABLE = [ 117.001 , 114.697 , 97.404 ] ;
const DEFAULT _MODEL _NAME = "tiny_yolov2_model" ;
const DEFAULT _MODEL _NAME _SEPARABLE _CONV = "tiny_yolov2_separable_conv_model" ;
// build/src/tinyYolov2/config.js
const isNumber = ( arg ) => typeof arg === "number" ;
function validateConfig ( config2 ) {
if ( ! config2 ) {
throw new Error ( ` invalid config: ${ config2 } ` ) ;
}
if ( typeof config2 . withSeparableConvs !== "boolean" ) {
throw new Error ( ` config.withSeparableConvs has to be a boolean, have: ${ config2 . withSeparableConvs } ` ) ;
}
if ( ! isNumber ( config2 . iouThreshold ) || config2 . iouThreshold < 0 || config2 . iouThreshold > 1 ) {
throw new Error ( ` config.iouThreshold has to be a number between [0, 1], have: ${ config2 . iouThreshold } ` ) ;
}
if ( ! Array . isArray ( config2 . classes ) || ! config2 . classes . length || ! config2 . classes . every ( ( c ) => typeof c === "string" ) ) {
throw new Error ( ` config.classes has to be an array of class names: string[], have: ${ JSON . stringify ( config2 . classes ) } ` ) ;
}
if ( ! Array . isArray ( config2 . anchors ) || ! config2 . anchors . length || ! config2 . anchors . map ( ( a ) => a || { } ) . every ( ( a ) => isNumber ( a . x ) && isNumber ( a . y ) ) ) {
throw new Error ( ` config.anchors has to be an array of { x: number, y: number }, have: ${ JSON . stringify ( config2 . anchors ) } ` ) ;
}
if ( config2 . meanRgb && ( ! Array . isArray ( config2 . meanRgb ) || config2 . meanRgb . length !== 3 || ! config2 . meanRgb . every ( isNumber ) ) ) {
throw new Error ( ` config.meanRgb has to be an array of shape [number, number, number], have: ${ JSON . stringify ( config2 . meanRgb ) } ` ) ;
}
}
// build/src/tinyYolov2/leaky.js
import {
add as add11 ,
mul as mul4 ,
relu as relu6 ,
scalar as scalar7 ,
sub as sub4 ,
tidy as tidy25
} from "@tensorflow/tfjs" ;
function leaky ( x ) {
return tidy25 ( ( ) => {
const min = mul4 ( x , scalar7 ( 0.10000000149011612 ) ) ;
return add11 ( relu6 ( sub4 ( x , min ) ) , min ) ;
} ) ;
}
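// Note on leaky above: relu(x - 0.1 * x) + 0.1 * x equals x for x >= 0 and 0.1 * x for x < 0,
// i.e. a LeakyReLU with negative slope of roughly 0.1. Scalar sketch (illustration only):
const exampleLeakyRelu = (v) => Math.max(v, 0.1 * v);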
// build/src/tinyYolov2/convWithBatchNorm.js
import {
add as add12 ,
conv2d as conv2d6 ,
mul as mul5 ,
pad ,
sub as sub5 ,
tidy as tidy26
} from "@tensorflow/tfjs" ;
function convWithBatchNorm ( x , params ) {
return tidy26 ( ( ) => {
let out = pad ( x , [ [ 0 , 0 ] , [ 1 , 1 ] , [ 1 , 1 ] , [ 0 , 0 ] ] ) ;
out = conv2d6 ( out , params . conv . filters , [ 1 , 1 ] , "valid" ) ;
out = sub5 ( out , params . bn . sub ) ;
out = mul5 ( out , params . bn . truediv ) ;
out = add12 ( out , params . conv . bias ) ;
return leaky ( out ) ;
} ) ;
}
// build/src/tinyYolov2/depthwiseSeparableConv.js
import {
add as add13 ,
pad as pad2 ,
separableConv2d as separableConv2d2 ,
tidy as tidy27
} from "@tensorflow/tfjs" ;
function depthwiseSeparableConv3 ( x , params ) {
return tidy27 ( ( ) => {
let out = pad2 ( x , [ [ 0 , 0 ] , [ 1 , 1 ] , [ 1 , 1 ] , [ 0 , 0 ] ] ) ;
out = separableConv2d2 ( out , params . depthwise _filter , params . pointwise _filter , [ 1 , 1 ] , "valid" ) ;
out = add13 ( out , params . bias ) ;
return leaky ( out ) ;
} ) ;
}
// build/src/tinyYolov2/extractParams.js
import {
tensor1d as tensor1d6
} from "@tensorflow/tfjs" ;
function extractorsFactory9 ( extractWeights , paramMappings ) {
const extractConvParams = extractConvParamsFactory ( extractWeights , paramMappings ) ;
function extractBatchNormParams ( size , mappedPrefix ) {
const sub6 = tensor1d6 ( extractWeights ( size ) ) ;
const truediv = tensor1d6 ( extractWeights ( size ) ) ;
paramMappings . push ( { paramPath : ` ${ mappedPrefix } /sub ` } , { paramPath : ` ${ mappedPrefix } /truediv ` } ) ;
return { sub : sub6 , truediv } ;
}
function extractConvWithBatchNormParams ( channelsIn , channelsOut , mappedPrefix ) {
const conv3 = extractConvParams ( channelsIn , channelsOut , 3 , ` ${ mappedPrefix } /conv ` ) ;
const bn = extractBatchNormParams ( channelsOut , ` ${ mappedPrefix } /bn ` ) ;
return { conv : conv3 , bn } ;
}
const extractSeparableConvParams = extractSeparableConvParamsFactory ( extractWeights , paramMappings ) ;
return {
extractConvParams ,
extractConvWithBatchNormParams ,
extractSeparableConvParams
} ;
}
function extractParams13 ( weights , config2 , boxEncodingSize , filterSizes ) {
const { extractWeights , getRemainingWeights } = extractWeightsFactory ( weights ) ;
const paramMappings = [ ] ;
const { extractConvParams , extractConvWithBatchNormParams , extractSeparableConvParams } = extractorsFactory9 ( extractWeights , paramMappings ) ;
let params ;
if ( config2 . withSeparableConvs ) {
const [ s0 , s1 , s2 , s3 , s4 , s5 , s6 , s7 , s8 ] = filterSizes ;
const conv0 = config2 . isFirstLayerConv2d ? extractConvParams ( s0 , s1 , 3 , "conv0" ) : extractSeparableConvParams ( s0 , s1 , "conv0" ) ;
const conv1 = extractSeparableConvParams ( s1 , s2 , "conv1" ) ;
const conv22 = extractSeparableConvParams ( s2 , s3 , "conv2" ) ;
const conv3 = extractSeparableConvParams ( s3 , s4 , "conv3" ) ;
const conv4 = extractSeparableConvParams ( s4 , s5 , "conv4" ) ;
const conv5 = extractSeparableConvParams ( s5 , s6 , "conv5" ) ;
const conv6 = s7 ? extractSeparableConvParams ( s6 , s7 , "conv6" ) : void 0 ;
const conv7 = s8 ? extractSeparableConvParams ( s7 , s8 , "conv7" ) : void 0 ;
const conv8 = extractConvParams ( s8 || s7 || s6 , 5 * boxEncodingSize , 1 , "conv8" ) ;
params = { conv0 , conv1 , conv2 : conv22 , conv3 , conv4 , conv5 , conv6 , conv7 , conv8 } ;
} else {
const [ s0 , s1 , s2 , s3 , s4 , s5 , s6 , s7 , s8 ] = filterSizes ;
const conv0 = extractConvWithBatchNormParams ( s0 , s1 , "conv0" ) ;
const conv1 = extractConvWithBatchNormParams ( s1 , s2 , "conv1" ) ;
const conv22 = extractConvWithBatchNormParams ( s2 , s3 , "conv2" ) ;
const conv3 = extractConvWithBatchNormParams ( s3 , s4 , "conv3" ) ;
const conv4 = extractConvWithBatchNormParams ( s4 , s5 , "conv4" ) ;
const conv5 = extractConvWithBatchNormParams ( s5 , s6 , "conv5" ) ;
const conv6 = extractConvWithBatchNormParams ( s6 , s7 , "conv6" ) ;
const conv7 = extractConvWithBatchNormParams ( s7 , s8 , "conv7" ) ;
const conv8 = extractConvParams ( s8 , 5 * boxEncodingSize , 1 , "conv8" ) ;
params = { conv0 , conv1 , conv2 : conv22 , conv3 , conv4 , conv5 , conv6 , conv7 , conv8 } ;
}
if ( getRemainingWeights ( ) . length !== 0 ) {
throw new Error ( ` weights remaining after extract: ${ getRemainingWeights ( ) . length } ` ) ;
}
return { params , paramMappings } ;
}
// build/src/tinyYolov2/extractParamsFromWeigthMap.js
function extractorsFactory10 ( weightMap , paramMappings ) {
const extractWeightEntry = extractWeightEntryFactory ( weightMap , paramMappings ) ;
function extractBatchNormParams ( prefix ) {
const sub6 = extractWeightEntry ( ` ${ prefix } /sub ` , 1 ) ;
const truediv = extractWeightEntry ( ` ${ prefix } /truediv ` , 1 ) ;
return { sub : sub6 , truediv } ;
}
function extractConvParams ( prefix ) {
const filters = extractWeightEntry ( ` ${ prefix } /filters ` , 4 ) ;
const bias = extractWeightEntry ( ` ${ prefix } /bias ` , 1 ) ;
return { filters , bias } ;
}
function extractConvWithBatchNormParams ( prefix ) {
const conv3 = extractConvParams ( ` ${ prefix } /conv ` ) ;
const bn = extractBatchNormParams ( ` ${ prefix } /bn ` ) ;
return { conv : conv3 , bn } ;
}
const extractSeparableConvParams = loadSeparableConvParamsFactory ( extractWeightEntry ) ;
return {
extractConvParams ,
extractConvWithBatchNormParams ,
extractSeparableConvParams
} ;
}
function extractParamsFromWeigthMap13 ( weightMap , config2 ) {
const paramMappings = [ ] ;
const { extractConvParams , extractConvWithBatchNormParams , extractSeparableConvParams } = extractorsFactory10 ( weightMap , paramMappings ) ;
let params ;
if ( config2 . withSeparableConvs ) {
const numFilters = config2 . filterSizes && config2 . filterSizes . length || 9 ;
params = {
conv0 : config2 . isFirstLayerConv2d ? extractConvParams ( "conv0" ) : extractSeparableConvParams ( "conv0" ) ,
conv1 : extractSeparableConvParams ( "conv1" ) ,
conv2 : extractSeparableConvParams ( "conv2" ) ,
conv3 : extractSeparableConvParams ( "conv3" ) ,
conv4 : extractSeparableConvParams ( "conv4" ) ,
conv5 : extractSeparableConvParams ( "conv5" ) ,
conv6 : numFilters > 7 ? extractSeparableConvParams ( "conv6" ) : void 0 ,
conv7 : numFilters > 8 ? extractSeparableConvParams ( "conv7" ) : void 0 ,
conv8 : extractConvParams ( "conv8" )
} ;
} else {
params = {
conv0 : extractConvWithBatchNormParams ( "conv0" ) ,
conv1 : extractConvWithBatchNormParams ( "conv1" ) ,
conv2 : extractConvWithBatchNormParams ( "conv2" ) ,
conv3 : extractConvWithBatchNormParams ( "conv3" ) ,
conv4 : extractConvWithBatchNormParams ( "conv4" ) ,
conv5 : extractConvWithBatchNormParams ( "conv5" ) ,
conv6 : extractConvWithBatchNormParams ( "conv6" ) ,
conv7 : extractConvWithBatchNormParams ( "conv7" ) ,
conv8 : extractConvParams ( "conv8" )
} ;
}
disposeUnusedWeightTensors ( weightMap , paramMappings ) ;
return { params , paramMappings } ;
}
// build/src/tinyYolov2/TinyYolov2Options.js
var TinyYolov2SizeType ;
( function ( TinyYolov2SizeType2 ) {
TinyYolov2SizeType2 [ TinyYolov2SizeType2 [ "XS" ] = 224 ] = "XS" ;
TinyYolov2SizeType2 [ TinyYolov2SizeType2 [ "SM" ] = 320 ] = "SM" ;
TinyYolov2SizeType2 [ TinyYolov2SizeType2 [ "MD" ] = 416 ] = "MD" ;
TinyYolov2SizeType2 [ TinyYolov2SizeType2 [ "LG" ] = 608 ] = "LG" ;
} ) ( TinyYolov2SizeType || ( TinyYolov2SizeType = { } ) ) ;
class TinyYolov2Options {
constructor ( { inputSize , scoreThreshold } = { } ) {
this . _name = "TinyYolov2Options" ;
this . _inputSize = inputSize || 416 ;
this . _scoreThreshold = scoreThreshold || 0.5 ;
if ( typeof this . _inputSize !== "number" || this . _inputSize % 32 !== 0 ) {
throw new Error ( ` ${ this . _name } - expected inputSize to be a number divisible by 32 ` ) ;
}
if ( typeof this . _scoreThreshold !== "number" || this . _scoreThreshold <= 0 || this . _scoreThreshold >= 1 ) {
throw new Error ( ` ${ this . _name } - expected scoreThreshold to be a number between 0 and 1 ` ) ;
}
}
get inputSize ( ) {
return this . _inputSize ;
}
get scoreThreshold ( ) {
return this . _scoreThreshold ;
}
}
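// Usage sketch for TinyYolov2Options (illustration only): inputSize must be a multiple of 32
// (the TinyYolov2SizeType values 224/320/416/608 above all qualify) and scoreThreshold must
// be strictly between 0 and 1.
const exampleTinyYoloOptions = () =>
  new TinyYolov2Options({ inputSize: TinyYolov2SizeType.SM, scoreThreshold: 0.6 });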
// build/src/tinyYolov2/TinyYolov2Base.js
import {
cast as cast5 ,
maxPool as maxPool3 ,
scalar as scalar8 ,
softmax as softmax3 ,
tidy as tidy28 ,
unstack as unstack6
} from "@tensorflow/tfjs" ;
class TinyYolov2Base extends NeuralNetwork {
constructor ( config2 ) {
super ( "TinyYolov2" ) ;
validateConfig ( config2 ) ;
this . _config = config2 ;
}
get config ( ) {
return this . _config ;
}
get withClassScores ( ) {
return this . config . withClassScores || this . config . classes . length > 1 ;
}
get boxEncodingSize ( ) {
return 5 + ( this . withClassScores ? this . config . classes . length : 0 ) ;
}
runTinyYolov2 ( x , params ) {
let out = convWithBatchNorm ( x , params . conv0 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv1 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv2 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv3 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv4 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv5 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 1 , 1 ] , "same" ) ;
out = convWithBatchNorm ( out , params . conv6 ) ;
out = convWithBatchNorm ( out , params . conv7 ) ;
return convLayer ( out , params . conv8 , "valid" , false ) ;
}
runMobilenet ( x , params ) {
let out = this . config . isFirstLayerConv2d ? leaky ( convLayer ( x , params . conv0 , "valid" , false ) ) : depthwiseSeparableConv3 ( x , params . conv0 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = depthwiseSeparableConv3 ( out , params . conv1 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = depthwiseSeparableConv3 ( out , params . conv2 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = depthwiseSeparableConv3 ( out , params . conv3 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = depthwiseSeparableConv3 ( out , params . conv4 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 2 , 2 ] , "same" ) ;
out = depthwiseSeparableConv3 ( out , params . conv5 ) ;
out = maxPool3 ( out , [ 2 , 2 ] , [ 1 , 1 ] , "same" ) ;
out = params . conv6 ? depthwiseSeparableConv3 ( out , params . conv6 ) : out ;
out = params . conv7 ? depthwiseSeparableConv3 ( out , params . conv7 ) : out ;
return convLayer ( out , params . conv8 , "valid" , false ) ;
}
forwardInput ( input , inputSize ) {
const { params } = this ;
if ( ! params ) {
throw new Error ( "TinyYolov2 - load model before inference" ) ;
}
return tidy28 ( ( ) => {
let batchTensor = cast5 ( input . toBatchTensor ( inputSize , false ) , "float32" ) ;
batchTensor = this . config . meanRgb ? normalize ( batchTensor , this . config . meanRgb ) : batchTensor ;
batchTensor = batchTensor . div ( scalar8 ( 256 ) ) ;
return this . config . withSeparableConvs ? this . runMobilenet ( batchTensor , params ) : this . runTinyYolov2 ( batchTensor , params ) ;
} ) ;
}
async forward ( input , inputSize ) {
return await this . forwardInput ( await toNetInput ( input ) , inputSize ) ;
}
async detect ( input , forwardParams = { } ) {
const { inputSize , scoreThreshold } = new TinyYolov2Options ( forwardParams ) ;
const netInput = await toNetInput ( input ) ;
const out = await this . forwardInput ( netInput , inputSize ) ;
const out0 = tidy28 ( ( ) => unstack6 ( out ) [ 0 ] . expandDims ( ) ) ;
const inputDimensions = {
width : netInput . getInputWidth ( 0 ) ,
height : netInput . getInputHeight ( 0 )
} ;
const results = await this . extractBoxes ( out0 , netInput . getReshapedInputDimensions ( 0 ) , scoreThreshold ) ;
out . dispose ( ) ;
out0 . dispose ( ) ;
const boxes = results . map ( ( res ) => res . box ) ;
const scores = results . map ( ( res ) => res . score ) ;
const classScores = results . map ( ( res ) => res . classScore ) ;
const classNames = results . map ( ( res ) => this . config . classes [ res . label ] ) ;
const indices = nonMaxSuppression ( boxes . map ( ( box ) => box . rescale ( inputSize ) ) , scores , this . config . iouThreshold , true ) ;
const detections = indices . map ( ( idx ) => new ObjectDetection ( scores [ idx ] , classScores [ idx ] , classNames [ idx ] , boxes [ idx ] , inputDimensions ) ) ;
return detections ;
}
getDefaultModelName ( ) {
return "" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return extractParamsFromWeigthMap13 ( weightMap , this . config ) ;
}
extractParams ( weights ) {
const filterSizes = this . config . filterSizes || TinyYolov2Base . DEFAULT _FILTER _SIZES ;
const numFilters = filterSizes ? filterSizes . length : void 0 ;
if ( numFilters !== 7 && numFilters !== 8 && numFilters !== 9 ) {
throw new Error ( ` TinyYolov2 - expected 7 | 8 | 9 convolutional filters, but found ${ numFilters } filterSizes in config ` ) ;
}
return extractParams13 ( weights , this . config , this . boxEncodingSize , filterSizes ) ;
}
async extractBoxes ( outputTensor , inputBlobDimensions , scoreThreshold ) {
const { width , height } = inputBlobDimensions ;
const inputSize = Math . max ( width , height ) ;
const correctionFactorX = inputSize / width ;
const correctionFactorY = inputSize / height ;
const numCells = outputTensor . shape [ 1 ] ;
const numBoxes = this . config . anchors . length ;
const [ boxesTensor , scoresTensor , classScoresTensor ] = tidy28 ( ( ) => {
const reshaped = outputTensor . reshape ( [ numCells , numCells , numBoxes , this . boxEncodingSize ] ) ;
const boxes = reshaped . slice ( [ 0 , 0 , 0 , 0 ] , [ numCells , numCells , numBoxes , 4 ] ) ;
const scores = reshaped . slice ( [ 0 , 0 , 0 , 4 ] , [ numCells , numCells , numBoxes , 1 ] ) ;
const classScores = this . withClassScores ? softmax3 ( reshaped . slice ( [ 0 , 0 , 0 , 5 ] , [ numCells , numCells , numBoxes , this . config . classes . length ] ) , 3 ) : scalar8 ( 0 ) ;
return [ boxes , scores , classScores ] ;
} ) ;
const results = [ ] ;
const scoresData = await scoresTensor . array ( ) ;
const boxesData = await boxesTensor . array ( ) ;
for ( let row = 0 ; row < numCells ; row ++ ) {
for ( let col = 0 ; col < numCells ; col ++ ) {
for ( let anchor = 0 ; anchor < numBoxes ; anchor ++ ) {
const score = sigmoid ( scoresData [ row ] [ col ] [ anchor ] [ 0 ] ) ;
if ( ! scoreThreshold || score > scoreThreshold ) {
const ctX = ( col + sigmoid ( boxesData [ row ] [ col ] [ anchor ] [ 0 ] ) ) / numCells * correctionFactorX ;
const ctY = ( row + sigmoid ( boxesData [ row ] [ col ] [ anchor ] [ 1 ] ) ) / numCells * correctionFactorY ;
const width2 = Math . exp ( boxesData [ row ] [ col ] [ anchor ] [ 2 ] ) * this . config . anchors [ anchor ] . x / numCells * correctionFactorX ;
const height2 = Math . exp ( boxesData [ row ] [ col ] [ anchor ] [ 3 ] ) * this . config . anchors [ anchor ] . y / numCells * correctionFactorY ;
const x = ctX - width2 / 2 ;
const y = ctY - height2 / 2 ;
const pos = { row , col , anchor } ;
const { classScore , label } = this . withClassScores ? await this . extractPredictedClass ( classScoresTensor , pos ) : { classScore : 1 , label : 0 } ;
results . push ( {
box : new BoundingBox ( x , y , x + width2 , y + height2 ) ,
score ,
classScore : score * classScore ,
label ,
... pos
} ) ;
}
}
}
}
boxesTensor . dispose ( ) ;
scoresTensor . dispose ( ) ;
classScoresTensor . dispose ( ) ;
return results ;
}
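// Note on the decode in extractBoxes above: for each grid cell (row, col) and anchor, the box
// center is (col + sigmoid(tx)) / numCells and (row + sigmoid(ty)) / numCells, the box size is
// exp(tw) * anchor.x / numCells and exp(th) * anchor.y / numCells, and both are then rescaled
// by correctionFactorX / correctionFactorY to undo the square padding of the network input.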
async extractPredictedClass ( classesTensor , pos ) {
const { row , col , anchor } = pos ;
const classesData = await classesTensor . array ( ) ;
return Array ( this . config . classes . length ) . fill ( 0 ) . map ( ( _ , i ) => classesData [ row ] [ col ] [ anchor ] [ i ] ) . map ( ( classScore , label ) => ( {
classScore ,
label
} ) ) . reduce ( ( max , curr ) => max . classScore > curr . classScore ? max : curr ) ;
}
}
TinyYolov2Base . DEFAULT _FILTER _SIZES = [
3 ,
16 ,
32 ,
64 ,
128 ,
256 ,
512 ,
1024 ,
1024
] ;
// build/src/tinyYolov2/TinyYolov2.js
class TinyYolov2 extends TinyYolov2Base {
constructor ( withSeparableConvs = true ) {
const config2 = Object . assign ( { } , {
withSeparableConvs ,
iouThreshold : IOU _THRESHOLD ,
classes : [ "face" ]
} , withSeparableConvs ? {
anchors : BOX _ANCHORS _SEPARABLE ,
meanRgb : MEAN _RGB _SEPARABLE
} : {
anchors : BOX _ANCHORS ,
withClassScores : true
} ) ;
super ( config2 ) ;
}
get withSeparableConvs ( ) {
return this . config . withSeparableConvs ;
}
get anchors ( ) {
return this . config . anchors ;
}
async locateFaces ( input , forwardParams ) {
const objectDetections = await this . detect ( input , forwardParams ) ;
return objectDetections . map ( ( det ) => new FaceDetection ( det . score , det . relativeBox , { width : det . imageWidth , height : det . imageHeight } ) ) ;
}
getDefaultModelName ( ) {
return this . withSeparableConvs ? DEFAULT _MODEL _NAME _SEPARABLE _CONV : DEFAULT _MODEL _NAME ;
}
extractParamsFromWeigthMap ( weightMap ) {
return super . extractParamsFromWeigthMap ( weightMap ) ;
}
}
// build/src/tinyYolov2/types.js
// build/src/tinyYolov2/index.js
function createTinyYolov2 ( weights , withSeparableConvs = true ) {
const net = new TinyYolov2 ( withSeparableConvs ) ;
net . extractWeights ( weights ) ;
return net ;
}
// build/src/tinyFaceDetector/TinyFaceDetectorOptions.js
class TinyFaceDetectorOptions extends TinyYolov2Options {
constructor ( ) {
super ( ... arguments ) ;
this . _name = "TinyFaceDetectorOptions" ;
}
}
// build/src/globalApi/ComposableTask.js
class ComposableTask {
async then ( onfulfilled ) {
return onfulfilled ( await this . run ( ) ) ;
}
async run ( ) {
throw new Error ( "ComposableTask - run is not implemented" ) ;
}
}
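// Note: ComposableTask implements then(), so every task below is a thenable and can be awaited
// directly; subclasses only override run(). Minimal sketch of a custom task (illustration only):
class ExampleUppercaseTask extends ComposableTask {
  constructor(text) {
    super();
    this.text = text;
  }
  async run() {
    return this.text.toUpperCase();
  }
}
// Usage: const result = await new ExampleUppercaseTask("face"); // "FACE"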
// build/src/globalApi/extractFacesAndComputeResults.js
import {
Tensor as Tensor4
} from "@tensorflow/tfjs" ;
async function extractAllFacesAndComputeResults ( parentResults , input , computeResults , extractedFaces , getRectForAlignment = ( { alignedRect } ) => alignedRect ) {
const faceBoxes = parentResults . map ( ( parentResult ) => isWithFaceLandmarks ( parentResult ) ? getRectForAlignment ( parentResult ) : parentResult . detection ) ;
const faces = extractedFaces || ( input instanceof Tensor4 ? await extractFaceTensors ( input , faceBoxes ) : await extractFaces ( input , faceBoxes ) ) ;
const results = await computeResults ( faces ) ;
faces . forEach ( ( f ) => f instanceof Tensor4 && f . dispose ( ) ) ;
return results ;
}
async function extractSingleFaceAndComputeResult ( parentResult , input , computeResult , extractedFaces , getRectForAlignment ) {
return extractAllFacesAndComputeResults ( [ parentResult ] , input , async ( faces ) => computeResult ( faces [ 0 ] ) , extractedFaces , getRectForAlignment ) ;
}
// build/src/tinyFaceDetector/const.js
const IOU _THRESHOLD2 = 0.4 ;
const BOX _ANCHORS2 = [
new Point ( 1.603231 , 2.094468 ) ,
new Point ( 6.041143 , 7.080126 ) ,
new Point ( 2.882459 , 3.518061 ) ,
new Point ( 4.266906 , 5.178857 ) ,
new Point ( 9.041765 , 10.66308 )
] ;
const MEAN _RGB = [ 117.001 , 114.697 , 97.404 ] ;
// build/src/tinyFaceDetector/TinyFaceDetector.js
class TinyFaceDetector extends TinyYolov2Base {
constructor ( ) {
const config2 = {
withSeparableConvs : true ,
iouThreshold : IOU _THRESHOLD2 ,
classes : [ "face" ] ,
anchors : BOX _ANCHORS2 ,
meanRgb : MEAN _RGB ,
isFirstLayerConv2d : true ,
filterSizes : [ 3 , 16 , 32 , 64 , 128 , 256 , 512 ]
} ;
super ( config2 ) ;
}
get anchors ( ) {
return this . config . anchors ;
}
async locateFaces ( input , forwardParams ) {
const objectDetections = await this . detect ( input , forwardParams ) ;
return objectDetections . map ( ( det ) => new FaceDetection ( det . score , det . relativeBox , { width : det . imageWidth , height : det . imageHeight } ) ) ;
}
getDefaultModelName ( ) {
return "tiny_face_detector_model" ;
}
extractParamsFromWeigthMap ( weightMap ) {
return super . extractParamsFromWeigthMap ( weightMap ) ;
}
}
// build/src/globalApi/nets.js
const nets = {
ssdMobilenetv1 : new SsdMobilenetv1 ( ) ,
tinyFaceDetector : new TinyFaceDetector ( ) ,
tinyYolov2 : new TinyYolov2 ( ) ,
faceLandmark68Net : new FaceLandmark68Net ( ) ,
faceLandmark68TinyNet : new FaceLandmark68TinyNet ( ) ,
faceRecognitionNet : new FaceRecognitionNet ( ) ,
faceExpressionNet : new FaceExpressionNet ( ) ,
ageGenderNet : new AgeGenderNet ( )
} ;
const ssdMobilenetv1 = ( input , options ) => nets . ssdMobilenetv1 . locateFaces ( input , options ) ;
const tinyFaceDetector = ( input , options ) => nets . tinyFaceDetector . locateFaces ( input , options ) ;
const tinyYolov23 = ( input , options ) => nets . tinyYolov2 . locateFaces ( input , options ) ;
const detectFaceLandmarks = ( input ) => nets . faceLandmark68Net . detectLandmarks ( input ) ;
const detectFaceLandmarksTiny = ( input ) => nets . faceLandmark68TinyNet . detectLandmarks ( input ) ;
const computeFaceDescriptor = ( input ) => nets . faceRecognitionNet . computeFaceDescriptor ( input ) ;
const recognizeFaceExpressions = ( input ) => nets . faceExpressionNet . predictExpressions ( input ) ;
const predictAgeAndGender = ( input ) => nets . ageGenderNet . predictAgeAndGender ( input ) ;
const loadSsdMobilenetv1Model = ( url ) => nets . ssdMobilenetv1 . load ( url ) ;
const loadTinyFaceDetectorModel = ( url ) => nets . tinyFaceDetector . load ( url ) ;
const loadTinyYolov2Model = ( url ) => nets . tinyYolov2 . load ( url ) ;
const loadFaceLandmarkModel = ( url ) => nets . faceLandmark68Net . load ( url ) ;
const loadFaceLandmarkTinyModel = ( url ) => nets . faceLandmark68TinyNet . load ( url ) ;
const loadFaceRecognitionModel = ( url ) => nets . faceRecognitionNet . load ( url ) ;
const loadFaceExpressionModel = ( url ) => nets . faceExpressionNet . load ( url ) ;
const loadAgeGenderModel = ( url ) => nets . ageGenderNet . load ( url ) ;
const loadFaceDetectionModel = loadSsdMobilenetv1Model ;
const locateFaces = ssdMobilenetv1 ;
const detectLandmarks = detectFaceLandmarks ;
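// Usage sketch for the global API above (illustration only): load the weights once, then call
// the wrappers. `modelBaseUrl` and `imageEl` are assumed inputs; the model file names default
// to each net's getDefaultModelName().
async function exampleGlobalApi(modelBaseUrl, imageEl) {
  await loadSsdMobilenetv1Model(modelBaseUrl);
  const detections = await locateFaces(imageEl, { minConfidence: 0.6 });
  return detections; // array of FaceDetection
}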
// build/src/globalApi/PredictFaceExpressionsTask.js
class PredictFaceExpressionsTaskBase extends ComposableTask {
constructor ( parentTask , input , extractedFaces ) {
super ( ) ;
this . parentTask = parentTask ;
this . input = input ;
this . extractedFaces = extractedFaces ;
}
}
class PredictAllFaceExpressionsTask extends PredictFaceExpressionsTaskBase {
async run ( ) {
const parentResults = await this . parentTask ;
const faceExpressionsByFace = await extractAllFacesAndComputeResults ( parentResults , this . input , async ( faces ) => await Promise . all ( faces . map ( ( face ) => nets . faceExpressionNet . predictExpressions ( face ) ) ) , this . extractedFaces ) ;
return parentResults . map ( ( parentResult , i ) => extendWithFaceExpressions ( parentResult , faceExpressionsByFace [ i ] ) ) ;
}
withAgeAndGender ( ) {
return new PredictAllAgeAndGenderTask ( this , this . input ) ;
}
}
class PredictSingleFaceExpressionsTask extends PredictFaceExpressionsTaskBase {
async run ( ) {
const parentResult = await this . parentTask ;
if ( ! parentResult ) {
return ;
}
const faceExpressions = await extractSingleFaceAndComputeResult ( parentResult , this . input , ( face ) => nets . faceExpressionNet . predictExpressions ( face ) , this . extractedFaces ) ;
return extendWithFaceExpressions ( parentResult , faceExpressions ) ;
}
withAgeAndGender ( ) {
return new PredictSingleAgeAndGenderTask ( this , this . input ) ;
}
}
class PredictAllFaceExpressionsWithFaceAlignmentTask extends PredictAllFaceExpressionsTask {
withAgeAndGender ( ) {
return new PredictAllAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptors ( ) {
return new ComputeAllFaceDescriptorsTask ( this , this . input ) ;
}
}
class PredictSingleFaceExpressionsWithFaceAlignmentTask extends PredictSingleFaceExpressionsTask {
withAgeAndGender ( ) {
return new PredictSingleAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptor ( ) {
return new ComputeSingleFaceDescriptorTask ( this , this . input ) ;
}
}
// build/src/globalApi/PredictAgeAndGenderTask.js
class PredictAgeAndGenderTaskBase extends ComposableTask {
constructor ( parentTask , input , extractedFaces ) {
super ( ) ;
this . parentTask = parentTask ;
this . input = input ;
this . extractedFaces = extractedFaces ;
}
}
class PredictAllAgeAndGenderTask extends PredictAgeAndGenderTaskBase {
async run ( ) {
const parentResults = await this . parentTask ;
const ageAndGenderByFace = await extractAllFacesAndComputeResults ( parentResults , this . input , async ( faces ) => await Promise . all ( faces . map ( ( face ) => nets . ageGenderNet . predictAgeAndGender ( face ) ) ) , this . extractedFaces ) ;
return parentResults . map ( ( parentResult , i ) => {
const { age , gender , genderProbability } = ageAndGenderByFace [ i ] ;
return extendWithAge ( extendWithGender ( parentResult , gender , genderProbability ) , age ) ;
} ) ;
}
withFaceExpressions ( ) {
return new PredictAllFaceExpressionsTask ( this , this . input ) ;
}
}
class PredictSingleAgeAndGenderTask extends PredictAgeAndGenderTaskBase {
async run ( ) {
const parentResult = await this . parentTask ;
if ( ! parentResult ) {
return ;
}
const { age , gender , genderProbability } = await extractSingleFaceAndComputeResult ( parentResult , this . input , ( face ) => nets . ageGenderNet . predictAgeAndGender ( face ) , this . extractedFaces ) ;
return extendWithAge ( extendWithGender ( parentResult , gender , genderProbability ) , age ) ;
}
withFaceExpressions ( ) {
return new PredictSingleFaceExpressionsTask ( this , this . input ) ;
}
}
class PredictAllAgeAndGenderWithFaceAlignmentTask extends PredictAllAgeAndGenderTask {
withFaceExpressions ( ) {
return new PredictAllFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptors ( ) {
return new ComputeAllFaceDescriptorsTask ( this , this . input ) ;
}
}
class PredictSingleAgeAndGenderWithFaceAlignmentTask extends PredictSingleAgeAndGenderTask {
withFaceExpressions ( ) {
return new PredictSingleFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptor ( ) {
return new ComputeSingleFaceDescriptorTask ( this , this . input ) ;
}
}
// build/src/globalApi/ComputeFaceDescriptorsTasks.js
class ComputeFaceDescriptorsTaskBase extends ComposableTask {
constructor ( parentTask , input ) {
super ( ) ;
this . parentTask = parentTask ;
this . input = input ;
}
}
class ComputeAllFaceDescriptorsTask extends ComputeFaceDescriptorsTaskBase {
async run ( ) {
const parentResults = await this . parentTask ;
const descriptors = await extractAllFacesAndComputeResults ( parentResults , this . input , ( faces ) => Promise . all ( faces . map ( ( face ) => nets . faceRecognitionNet . computeFaceDescriptor ( face ) ) ) , null , ( parentResult ) => parentResult . landmarks . align ( null , { useDlibAlignment : true } ) ) ;
return descriptors . map ( ( descriptor , i ) => extendWithFaceDescriptor ( parentResults [ i ] , descriptor ) ) ;
}
withFaceExpressions ( ) {
return new PredictAllFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictAllAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
}
class ComputeSingleFaceDescriptorTask extends ComputeFaceDescriptorsTaskBase {
async run ( ) {
const parentResult = await this . parentTask ;
if ( ! parentResult ) {
return ;
}
const descriptor = await extractSingleFaceAndComputeResult ( parentResult , this . input , ( face ) => nets . faceRecognitionNet . computeFaceDescriptor ( face ) , null , ( parentResult2 ) => parentResult2 . landmarks . align ( null , { useDlibAlignment : true } ) ) ;
return extendWithFaceDescriptor ( parentResult , descriptor ) ;
}
withFaceExpressions ( ) {
return new PredictSingleFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictSingleAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
}
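// Usage sketch for the descriptor tasks above (illustrative only, not executed by
// the bundle; the helper name is hypothetical). withFaceDescriptors() is only
// reachable after withFaceLandmarks(), since descriptors are computed on aligned
// face crops. Each result carries a Float32Array descriptor usable with
// euclideanDistance or FaceMatcher further below.
async function exampleComputeDescriptors(input) {
  const results = await detectAllFaces(input)
    .withFaceLandmarks()
    .withFaceDescriptors();
  return results.map((res) => res.descriptor);
}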
// build/src/globalApi/DetectFaceLandmarksTasks.js
import {
Tensor as Tensor5
} from "@tensorflow/tfjs" ;
class DetectFaceLandmarksTaskBase extends ComposableTask {
constructor ( parentTask , input , useTinyLandmarkNet ) {
super ( ) ;
this . parentTask = parentTask ;
this . input = input ;
this . useTinyLandmarkNet = useTinyLandmarkNet ;
}
get landmarkNet ( ) {
return this . useTinyLandmarkNet ? nets . faceLandmark68TinyNet : nets . faceLandmark68Net ;
}
}
class DetectAllFaceLandmarksTask extends DetectFaceLandmarksTaskBase {
async run ( ) {
const parentResults = await this . parentTask ;
const detections = parentResults . map ( ( res ) => res . detection ) ;
const faces = this . input instanceof Tensor5 ? await extractFaceTensors ( this . input , detections ) : await extractFaces ( this . input , detections ) ;
const faceLandmarksByFace = await Promise . all ( faces . map ( ( face ) => this . landmarkNet . detectLandmarks ( face ) ) ) ;
faces . forEach ( ( f ) => f instanceof Tensor5 && f . dispose ( ) ) ;
return parentResults . map ( ( parentResult , i ) => extendWithFaceLandmarks ( parentResult , faceLandmarksByFace [ i ] ) ) ;
}
withFaceExpressions ( ) {
return new PredictAllFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictAllAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptors ( ) {
return new ComputeAllFaceDescriptorsTask ( this , this . input ) ;
}
}
class DetectSingleFaceLandmarksTask extends DetectFaceLandmarksTaskBase {
async run ( ) {
const parentResult = await this . parentTask ;
if ( ! parentResult ) {
return ;
}
const { detection } = parentResult ;
const faces = this . input instanceof Tensor5 ? await extractFaceTensors ( this . input , [ detection ] ) : await extractFaces ( this . input , [ detection ] ) ;
const landmarks = await this . landmarkNet . detectLandmarks ( faces [ 0 ] ) ;
faces . forEach ( ( f ) => f instanceof Tensor5 && f . dispose ( ) ) ;
return extendWithFaceLandmarks ( parentResult , landmarks ) ;
}
withFaceExpressions ( ) {
return new PredictSingleFaceExpressionsWithFaceAlignmentTask ( this , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictSingleAgeAndGenderWithFaceAlignmentTask ( this , this . input ) ;
}
withFaceDescriptor ( ) {
return new ComputeSingleFaceDescriptorTask ( this , this . input ) ;
}
}
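// Usage sketch for the landmark tasks above (illustrative only, not executed by
// the bundle; the helper name is hypothetical). Passing true selects
// faceLandmark68TinyNet through the landmarkNet getter; extracted face tensors
// are disposed after landmark detection, as in run() above.
async function exampleDetectLandmarks(input, useTinyLandmarkNet = false) {
  return detectAllFaces(input).withFaceLandmarks(useTinyLandmarkNet);
}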
// build/src/globalApi/DetectFacesTasks.js
class DetectFacesTaskBase extends ComposableTask {
constructor ( input , options = new SsdMobilenetv1Options ( ) ) {
super ( ) ;
this . input = input ;
this . options = options ;
}
}
class DetectAllFacesTask extends DetectFacesTaskBase {
async run ( ) {
const { input , options } = this ;
// pick the detector backend matching the provided options type
const faceDetectionFunction = options instanceof TinyFaceDetectorOptions
? ( input2 ) => nets . tinyFaceDetector . locateFaces ( input2 , options )
: options instanceof SsdMobilenetv1Options
? ( input2 ) => nets . ssdMobilenetv1 . locateFaces ( input2 , options )
: options instanceof TinyYolov2Options
? ( input2 ) => nets . tinyYolov2 . locateFaces ( input2 , options )
: null ;
if ( ! faceDetectionFunction ) {
throw new Error ( "detectFaces - expected options to be instance of TinyFaceDetectorOptions | SsdMobilenetv1Options | TinyYolov2Options" ) ;
}
return faceDetectionFunction ( input ) ;
}
async runAndExtendWithFaceDetections ( ) {
const detections = await this . run ( ) ;
return detections . map ( ( detection ) => extendWithFaceDetection ( { } , detection ) ) ;
}
withFaceLandmarks ( useTinyLandmarkNet = false ) {
return new DetectAllFaceLandmarksTask ( this . runAndExtendWithFaceDetections ( ) , this . input , useTinyLandmarkNet ) ;
}
withFaceExpressions ( ) {
return new PredictAllFaceExpressionsTask ( this . runAndExtendWithFaceDetections ( ) , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictAllAgeAndGenderTask ( this . runAndExtendWithFaceDetections ( ) , this . input ) ;
}
}
class DetectSingleFaceTask extends DetectFacesTaskBase {
async run ( ) {
const faceDetections = await new DetectAllFacesTask ( this . input , this . options ) ;
let faceDetectionWithHighestScore = faceDetections [ 0 ] ;
faceDetections . forEach ( ( faceDetection ) => {
if ( faceDetection . score > faceDetectionWithHighestScore . score ) {
faceDetectionWithHighestScore = faceDetection ;
}
} ) ;
return faceDetectionWithHighestScore ;
}
async runAndExtendWithFaceDetection ( ) {
const detection = await this . run ( ) ;
return detection ? extendWithFaceDetection ( { } , detection ) : void 0 ;
}
withFaceLandmarks ( useTinyLandmarkNet = false ) {
return new DetectSingleFaceLandmarksTask ( this . runAndExtendWithFaceDetection ( ) , this . input , useTinyLandmarkNet ) ;
}
withFaceExpressions ( ) {
return new PredictSingleFaceExpressionsTask ( this . runAndExtendWithFaceDetection ( ) , this . input ) ;
}
withAgeAndGender ( ) {
return new PredictSingleAgeAndGenderTask ( this . runAndExtendWithFaceDetection ( ) , this . input ) ;
}
}
// build/src/globalApi/detectFaces.js
function detectSingleFace ( input , options = new SsdMobilenetv1Options ( ) ) {
return new DetectSingleFaceTask ( input , options ) ;
}
function detectAllFaces ( input , options = new SsdMobilenetv1Options ( ) ) {
return new DetectAllFacesTask ( input , options ) ;
}
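// Usage sketch for the two entry points above (illustrative only, not executed by
// the bundle; the helper name is hypothetical). Both return lazy, composable tasks
// that only execute when awaited or chained further; the options argument selects
// the detector backend and defaults to SsdMobilenetv1Options.
async function exampleDetect(input) {
  const all = await detectAllFaces(input, new TinyFaceDetectorOptions());
  const single = await detectSingleFace(input, new SsdMobilenetv1Options({ minConfidence: 0.8 }));
  return { all, single };
}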
// build/src/globalApi/allFaces.js
async function allFacesSsdMobilenetv1 ( input , minConfidence ) {
console . warn ( "allFacesSsdMobilenetv1 is deprecated and will be removed soon, use the high level api instead" ) ;
return await detectAllFaces ( input , new SsdMobilenetv1Options ( minConfidence ? { minConfidence } : { } ) ) . withFaceLandmarks ( ) . withFaceDescriptors ( ) ;
}
async function allFacesTinyYolov2 ( input , forwardParams = { } ) {
console . warn ( "allFacesTinyYolov2 is deprecated and will be removed soon, use the high level api instead" ) ;
return await detectAllFaces ( input , new TinyYolov2Options ( forwardParams ) ) . withFaceLandmarks ( ) . withFaceDescriptors ( ) ;
}
const allFaces = allFacesSsdMobilenetv1 ;
// build/src/euclideanDistance.js
function euclideanDistance ( arr1 , arr2 ) {
if ( arr1 . length !== arr2 . length )
throw new Error ( "euclideanDistance: arr1.length !== arr2.length" ) ;
const desc1 = Array . from ( arr1 ) ;
const desc2 = Array . from ( arr2 ) ;
return Math . sqrt ( desc1 . map ( ( val , i ) => val - desc2 [ i ] ) . reduce ( ( res , diff ) => res + Math . pow ( diff , 2 ) , 0 ) ) ;
}
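// Worked example for euclideanDistance above: with arr1 = [0, 3] and arr2 = [4, 0]
// the element-wise differences are [-4, 3], the sum of squares is 16 + 9 = 25, and
// the returned distance is Math.sqrt(25) === 5. The same function accepts
// Float32Array face descriptors of equal length.
//
//   euclideanDistance([0, 3], [4, 0]); // 5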
// build/src/globalApi/FaceMatcher.js
class FaceMatcher {
constructor ( inputs , distanceThreshold = 0.6 ) {
this . _distanceThreshold = distanceThreshold ;
const inputArray = Array . isArray ( inputs ) ? inputs : [ inputs ] ;
if ( ! inputArray . length ) {
throw new Error ( ` FaceRecognizer.constructor - expected at least one input ` ) ;
}
let count = 1 ;
const createUniqueLabel = ( ) => ` person ${ count ++ } ` ;
this . _labeledDescriptors = inputArray . map ( ( desc ) => {
if ( desc instanceof LabeledFaceDescriptors ) {
return desc ;
}
if ( desc instanceof Float32Array ) {
return new LabeledFaceDescriptors ( createUniqueLabel ( ) , [ desc ] ) ;
}
if ( desc . descriptor && desc . descriptor instanceof Float32Array ) {
return new LabeledFaceDescriptors ( createUniqueLabel ( ) , [ desc . descriptor ] ) ;
}
throw new Error ( ` FaceRecognizer.constructor - expected inputs to be of type LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array | Array<LabeledFaceDescriptors | WithFaceDescriptor<any> | Float32Array> ` ) ;
} ) ;
}
get labeledDescriptors ( ) {
return this . _labeledDescriptors ;
}
get distanceThreshold ( ) {
return this . _distanceThreshold ;
}
computeMeanDistance ( queryDescriptor , descriptors ) {
return descriptors . map ( ( d ) => euclideanDistance ( d , queryDescriptor ) ) . reduce ( ( d1 , d2 ) => d1 + d2 , 0 ) / ( descriptors . length || 1 ) ;
}
matchDescriptor ( queryDescriptor ) {
return this . labeledDescriptors . map ( ( { descriptors , label } ) => new FaceMatch ( label , this . computeMeanDistance ( queryDescriptor , descriptors ) ) ) . reduce ( ( best , curr ) => best . distance < curr . distance ? best : curr ) ;
}
findBestMatch ( queryDescriptor ) {
const bestMatch = this . matchDescriptor ( queryDescriptor ) ;
return bestMatch . distance < this . distanceThreshold ? bestMatch : new FaceMatch ( "unknown" , bestMatch . distance ) ;
}
toJSON ( ) {
return {
distanceThreshold : this . distanceThreshold ,
labeledDescriptors : this . labeledDescriptors . map ( ( ld ) => ld . toJSON ( ) )
} ;
}
static fromJSON ( json ) {
const labeledDescriptors = json . labeledDescriptors . map ( ( ld ) => LabeledFaceDescriptors . fromJSON ( ld ) ) ;
return new FaceMatcher ( labeledDescriptors , json . distanceThreshold ) ;
}
}
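// Usage sketch for FaceMatcher above (illustrative only, not executed by the
// bundle; the helper name is hypothetical). The constructor accepts
// LabeledFaceDescriptors, raw Float32Array descriptors, or results carrying a
// descriptor property, and labels unnamed inputs "person 1", "person 2", ... via
// createUniqueLabel above.
async function exampleMatchFaces(referenceInput, queryInput) {
  const reference = await detectSingleFace(referenceInput)
    .withFaceLandmarks()
    .withFaceDescriptor();
  const query = await detectSingleFace(queryInput)
    .withFaceLandmarks()
    .withFaceDescriptor();
  if (!reference || !query) return void 0;
  const matcher = new FaceMatcher(reference, 0.6);
  // findBestMatch returns a FaceMatch labeled "unknown" when the best mean
  // distance exceeds distanceThreshold
  return matcher.findBestMatch(query.descriptor);
}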
// build/src/globalApi/types.js
// build/src/globalApi/index.js
// build/src/tinyFaceDetector/index.js
function createTinyFaceDetector ( weights ) {
const net = new TinyFaceDetector ( ) ;
net . extractWeights ( weights ) ;
return net ;
}
// build/src/resizeResults.js
function resizeResults ( results , dimensions ) {
const { width , height } = new Dimensions ( dimensions . width , dimensions . height ) ;
if ( width <= 0 || height <= 0 ) {
throw new Error ( ` resizeResults - invalid dimensions: ${ JSON . stringify ( { width , height } )} ` ) ;
}
if ( Array . isArray ( results ) ) {
return results . map ( ( obj ) => resizeResults ( obj , { width , height } ) ) ;
}
if ( isWithFaceLandmarks ( results ) ) {
const resizedDetection = results . detection . forSize ( width , height ) ;
const resizedLandmarks = results . unshiftedLandmarks . forSize ( resizedDetection . box . width , resizedDetection . box . height ) ;
return extendWithFaceLandmarks ( extendWithFaceDetection ( results , resizedDetection ) , resizedLandmarks ) ;
}
if ( isWithFaceDetection ( results ) ) {
return extendWithFaceDetection ( results , results . detection . forSize ( width , height ) ) ;
}
if ( results instanceof FaceLandmarks || results instanceof FaceDetection ) {
return results . forSize ( width , height ) ;
}
return results ;
}
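// Usage sketch for resizeResults above (illustrative only, not executed by the
// bundle; the helper name and displaySize argument are hypothetical). Results are
// sized for the analyzed input media, so they are rescaled here to the dimensions
// of the element they will be rendered onto.
async function exampleResizeForDisplay(input, displaySize) {
  const results = await detectAllFaces(input).withFaceLandmarks();
  return resizeResults(results, displaySize);
}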
// build/package.json
var version = "0.5.3" ;
// build/src/index.js
import * as tf42 from "@tensorflow/tfjs" ;
const node = typeof process !== "undefined" ? process . version : false ;
const browser3 = typeof navigator !== "undefined" ? navigator . userAgent : false ;
const version2 = { faceapi : version , node , browser : browser3 } ;
export {
AgeGenderNet ,
BoundingBox ,
Box ,
ComposableTask ,
ComputeAllFaceDescriptorsTask ,
ComputeFaceDescriptorsTaskBase ,
ComputeSingleFaceDescriptorTask ,
DetectAllFaceLandmarksTask ,
DetectAllFacesTask ,
DetectFaceLandmarksTaskBase ,
DetectFacesTaskBase ,
DetectSingleFaceLandmarksTask ,
DetectSingleFaceTask ,
Dimensions ,
FACE_EXPRESSION_LABELS ,
FaceDetection ,
FaceDetectionNet ,
FaceExpressionNet ,
FaceExpressions ,
FaceLandmark68Net ,
FaceLandmark68TinyNet ,
FaceLandmarkNet ,
FaceLandmarks ,
FaceLandmarks5 ,
FaceLandmarks68 ,
FaceMatch ,
FaceMatcher ,
FaceRecognitionNet ,
Gender ,
LabeledBox ,
LabeledFaceDescriptors ,
NetInput ,
NeuralNetwork ,
ObjectDetection ,
Point ,
PredictedBox ,
Rect ,
SsdMobilenetv1 ,
SsdMobilenetv1Options ,
TinyFaceDetector ,
TinyFaceDetectorOptions ,
TinyYolov2 ,
TinyYolov2Options ,
TinyYolov2SizeType ,
allFaces ,
allFacesSsdMobilenetv1 ,
allFacesTinyYolov2 ,
awaitMediaLoaded ,
bufferToImage ,
computeFaceDescriptor ,
createCanvas ,
createCanvasFromMedia ,
createFaceDetectionNet ,
createFaceRecognitionNet ,
createSsdMobilenetv1 ,
createTinyFaceDetector ,
createTinyYolov2 ,
detectAllFaces ,
detectFaceLandmarks ,
detectFaceLandmarksTiny ,
detectLandmarks ,
detectSingleFace ,
draw_exports as draw ,
env ,
euclideanDistance ,
extendWithAge ,
extendWithFaceDescriptor ,
extendWithFaceDetection ,
extendWithFaceExpressions ,
extendWithFaceLandmarks ,
extendWithGender ,
extractFaceTensors ,
extractFaces ,
fetchImage ,
fetchJson ,
fetchNetWeights ,
fetchOrThrow ,
getContext2dOrThrow ,
getMediaDimensions ,
imageTensorToCanvas ,
imageToSquare ,
inverseSigmoid ,
iou ,
isMediaElement ,
isMediaLoaded ,
isWithAge ,
isWithFaceDetection ,
isWithFaceExpressions ,
isWithFaceLandmarks ,
isWithGender ,
loadAgeGenderModel ,
loadFaceDetectionModel ,
loadFaceExpressionModel ,
loadFaceLandmarkModel ,
loadFaceLandmarkTinyModel ,
loadFaceRecognitionModel ,
loadSsdMobilenetv1Model ,
loadTinyFaceDetectorModel ,
loadTinyYolov2Model ,
loadWeightMap ,
locateFaces ,
matchDimensions ,
minBbox ,
nets ,
nonMaxSuppression ,
normalize ,
padToSquare ,
predictAgeAndGender ,
recognizeFaceExpressions ,
resizeResults ,
resolveInput ,
shuffleArray ,
sigmoid ,
ssdMobilenetv1 ,
tf42 as tf ,
tinyFaceDetector ,
tinyYolov23 as tinyYolov2 ,
toNetInput ,
utils_exports as utils ,
validateConfig ,
version2 as version
} ;
//# sourceMappingURL=face-api.node.js.map