import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';

type Tensor = Object;
type DB = Array<{ name: string, source: string, embedding: number[] }>;

let model;

export async function load(config) {
  if (!model) {
    model = await tf.loadGraphModel(config.face.embedding.modelPath);
    if (config.debug) log(`load model: ${config.face.embedding.modelPath.match(/\/(.*)\./)[1]}`);
  }
  return model;
}

export function simmilarity(embedding1, embedding2, order = 2): number {
  if (!embedding1 || !embedding2) return 0;
  if (embedding1?.length === 0 || embedding2?.length === 0) return 0;
  if (embedding1?.length !== embedding2?.length) return 0;
  // general minkowski distance; euclidean distance is the limited case where order is 2
  const distance = embedding1
    .map((val, i) => (Math.abs(embedding1[i] - embedding2[i]) ** order)) // absolute difference raised to the given order
    .reduce((sum, now) => (sum + now), 0) // sum all distances
    ** (1 / order); // take the order-th root of the sum
  const res = Math.max(Math.trunc(1000 * (1 - distance)) / 1000, 0);
  return res;
}
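
/*
// illustrative usage sketch, not part of the module api; input values below are made up
// with the default order=2 the score is 1 - euclidean distance, truncated to 3 decimals and clamped to 0
simmilarity([0.5, 0.5], [0.5, 0.5]); // 1   - identical embeddings
simmilarity([0.5, 0.5], [0.5, 1.0]); // 0.5 - distance is sqrt(0 + 0.25) = 0.5
simmilarity([0.5, 0.5], []);         // 0   - empty or mismatched inputs return 0
*/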

export function match(embedding: Array<number>, db: DB, threshold = 0) {
  let best = { simmilarity: 0, name: '', source: '', embedding: [] as number[] };
  if (!embedding || !db || !Array.isArray(embedding) || !Array.isArray(db)) return best;
  for (const f of db) {
    if (f.embedding && f.name) {
      const perc = simmilarity(embedding, f.embedding);
      if (perc > threshold && perc > best.simmilarity) best = { ...f, simmilarity: perc };
    }
  }
  return best;
}
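
/*
// illustrative usage sketch, not part of the module api; db entries and threshold below are made up
const db: DB = [
  { name: 'person-a', source: 'a.jpg', embedding: [0.5, 0.5] },
  { name: 'person-b', source: 'b.jpg', embedding: [0.5, 1.0] },
];
match([0.5, 0.5], db, 0.25); // { name: 'person-a', source: 'a.jpg', embedding: [0.5, 0.5], simmilarity: 1 }
*/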

export function enhance(input): Tensor {
  const image = tf.tidy(() => {
    // input received from detector is already normalized to 0..1
    // input is also assumed to be straightened
    // const data = tf.image.resizeBilinear(input, [model.inputs[0].shape[2], model.inputs[0].shape[1]], false); // just resize to fit the embedding model
    // do a tight crop of the image and resize it to fit the model
    const box = [[0.05, 0.15, 0.85, 0.85]]; // empirical values for top, left, bottom, right
    const tensor = input.image || input.tensor;
    if (!(tensor instanceof tf.Tensor)) return null;
    const crop = (tensor.shape.length === 3)
      ? tf.image.cropAndResize(tf.expandDims(tensor, 0), box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]) // add batch dimension if missing
      : tf.image.cropAndResize(tensor, box, [0], [model.inputs[0].shape[2], model.inputs[0].shape[1]]);
    // convert to black & white to avoid colorization impact
    const rgb = [0.2989, 0.5870, 0.1140]; // factors for red/green/blue channels when converting to grayscale: https://www.mathworks.com/help/matlab/ref/rgb2gray.html
    const [red, green, blue] = tf.split(crop, 3, 3);
    const redNorm = tf.mul(red, rgb[0]);
    const greenNorm = tf.mul(green, rgb[1]);
    const blueNorm = tf.mul(blue, rgb[2]);
    const grayscale = tf.addN([redNorm, greenNorm, blueNorm]);
    const merge = tf.stack([grayscale, grayscale, grayscale], 3).squeeze(4);
    /*
    // optional: increase image contrast
    // either globally, per-channel so the mean is computed on each channel, or based on a histogram
    const mean = merge.mean();
    const factor = 5;
    const contrast = merge.sub(mean).mul(factor).add(mean);
    */
    // normalize brightness to the full 0..1 range
    const darken = merge.sub(merge.min());
    const lighten = darken.div(darken.max());
    return lighten;
  });
  return image;
}
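
/*
// shape flow through enhance, assuming a 112x112 model input as reported by the model signature at the bottom of this file:
// input.tensor  [1, h, w, 3] or [h, w, 3]  rgb image normalized to 0..1
// crop          [1, 112, 112, 3]           tight crop resized to the model input size
// grayscale     [1, 112, 112, 1]           weighted sum of the rgb channels
// merge         [1, 112, 112, 3]           grayscale replicated back to three channels since the model input has three channels
// lighten       [1, 112, 112, 3]           brightness stretched to the full 0..1 range
*/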

export async function predict(input, config): Promise<number[]> {
  if (!model) return [];
  return new Promise(async (resolve) => {
    // let data: Array<[]> = [];
    let data: Array<number> = [];
    if (config.face.embedding.enabled) {
      const image = enhance(input);
      if (!config.profile) {
        data = tf.tidy(() => {
          /*
          // if needed convert from NHWC to NCHW
          const nchw = image.transpose([0, 3, 1, 2]);
          */
          const res = model.predict(image);
          /*
          // optional: run it twice with a flipped image and combine the results
          const res1 = model.predict(image);
          const flipped = tf.image.flipLeftRight(image);
          const res2 = model.predict(flipped);
          const merge = tf.stack([res1, res2], 2).squeeze();
          const res = merge.logSumExp(1);
          */
          /*
          // optional: normalize outputs with l2 normalization
          const scaled = tf.tidy(() => {
            const l2 = res.norm('euclidean');
            const scale = res.div(l2);
            return scale;
          });
          */
          // optional: reduce feature vector complexity
          const reshape = res.reshape([128, 2]); // split the 256-element feature vector into 128 x 2
          const reduce = reshape.logSumExp(1); // reduce the 2nd dimension by calculating logSumExp on it
          const output: Array<number> = reduce.dataSync();
          return [...output]; // convert typed array to a plain array
        });
      } else {
        const profileData = await tf.profile(() => model.predict({ img_inputs: image }));
        data = [...profileData.result.dataSync()];
        profileData.result.dispose();
        profile.run('embedding', profileData);
      }
      tf.dispose(image);
    }
    resolve(data);
  });
}
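
/*
// illustrative end-to-end sketch, not part of the module api; config values and variable names below are assumptions
const config = { debug: false, profile: false, face: { embedding: { enabled: true, modelPath: 'file://models/mobileface.json' } } };
await load(config);
const embedding1 = await predict(face1, config); // face1/face2 are detector results exposing a .tensor property
const embedding2 = await predict(face2, config);
const score = simmilarity(embedding1, embedding2); // 0..1 where higher means more similar
*/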

/*
git clone https://github.com/becauseofAI/MobileFace
cd MobileFace/MobileFace_Identification
mmconvert --srcFramework mxnet --inputWeight MobileFace_Identification_V3-0000.params --inputNetwork MobileFace_Identification_V3-symbol.json --inputShape 3,112,112 --dstFramework tensorflow --outputModel saved
saved_model_cli show --dir saved/
tensorflowjs_converter --input_format tf_saved_model --output_format tfjs_graph_model --saved_model_tags train saved/ graph/
~/dev/detector/signature.js graph/
2021-03-12 08:25:12 DATA:  created on: 2021-03-12T13:17:11.960Z
2021-03-12 08:25:12 INFO:  graph model: /home/vlado/dev/face/MobileFace/MobileFace_Identification/graph/model.json
2021-03-12 08:25:12 INFO:  size: { unreliable: true, numTensors: 75, numDataBuffers: 75, numBytes: 2183192 }
2021-03-12 08:25:12 INFO:  model inputs based on signature
2021-03-12 08:25:12 INFO:  model outputs based on signature
2021-03-12 08:25:12 DATA:  inputs: [ { name: 'data:0', dtype: 'DT_FLOAT', shape: [ -1, 112, 112, 3 ] } ]
2021-03-12 08:25:12 DATA:  outputs: [ { id: 0, name: 'batchnorm0/add_1:0', dtype: 'DT_FLOAT', shape: [ -1, 256 ] } ]
*/