ts linting

parent 9c7a8af603
commit 95d3e96b79
@@ -1,3 +1,4 @@
+// @ts-nocheck
 import * as faceapi from '../dist/face-api.esm.js';
 
 // configuration options
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 const fs = require('fs');
 const path = require('path');
 const log = require('@vladmandic/pilogger');
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 const fs = require('fs');
 const path = require('path');
 const log = require('@vladmandic/pilogger'); // this is my simple logger with few extra features
@@ -1,3 +1,5 @@
+// @ts-nocheck
+
 const fs = require('fs');
 const path = require('path');
 const log = require('@vladmandic/pilogger');
@@ -6,154 +6,160 @@ import { loadWeightMap } from './dom/index';
 import { env } from './env/index';
 
 export abstract class NeuralNetwork<TNetParams> {
 
   protected _params: TNetParams | undefined = undefined
 
   protected _paramMappings: ParamMapping[] = []
 
   constructor(protected _name: string) {
   }
   private _name: any;
 
-  public get params(): TNetParams | undefined { return this._params }
-  public get paramMappings(): ParamMapping[] { return this._paramMappings }
-  public get isLoaded(): boolean { return !!this.params }
+  public get params(): TNetParams | undefined { return this._params; }
+
+  public get paramMappings(): ParamMapping[] { return this._paramMappings; }
+
+  public get isLoaded(): boolean { return !!this.params; }
 
   public getParamFromPath(paramPath: string): tf.Tensor {
-    const { obj, objProp } = this.traversePropertyPath(paramPath)
-    return obj[objProp]
+    const { obj, objProp } = this.traversePropertyPath(paramPath);
+    return obj[objProp];
   }
 
   public reassignParamFromPath(paramPath: string, tensor: tf.Tensor) {
-    const { obj, objProp } = this.traversePropertyPath(paramPath)
-    obj[objProp].dispose()
-    obj[objProp] = tensor
+    const { obj, objProp } = this.traversePropertyPath(paramPath);
+    obj[objProp].dispose();
+    obj[objProp] = tensor;
   }
 
   public getParamList() {
     return this._paramMappings.map(({ paramPath }) => ({
       path: paramPath,
-      tensor: this.getParamFromPath(paramPath)
-    }))
+      tensor: this.getParamFromPath(paramPath),
+    }));
   }
 
   public getTrainableParams() {
-    return this.getParamList().filter(param => param.tensor instanceof tf.Variable)
+    return this.getParamList().filter((param) => param.tensor instanceof tf.Variable);
   }
 
   public getFrozenParams() {
-    return this.getParamList().filter(param => !(param.tensor instanceof tf.Variable))
+    return this.getParamList().filter((param) => !(param.tensor instanceof tf.Variable));
   }
 
   public variable() {
     this.getFrozenParams().forEach(({ path, tensor }) => {
-      this.reassignParamFromPath(path, tensor.variable())
-    })
+      this.reassignParamFromPath(path, tensor.variable());
+    });
   }
 
   public freeze() {
     this.getTrainableParams().forEach(({ path, tensor: variable }) => {
-      const tensor = tf.tensor(variable.dataSync())
-      variable.dispose()
-      this.reassignParamFromPath(path, tensor)
-    })
+      const tensor = tf.tensor(variable.dataSync());
+      variable.dispose();
+      this.reassignParamFromPath(path, tensor);
+    });
   }
 
   public dispose(throwOnRedispose: boolean = true) {
-    this.getParamList().forEach(param => {
+    this.getParamList().forEach((param) => {
       if (throwOnRedispose && param.tensor.isDisposed) {
-        throw new Error(`param tensor has already been disposed for path ${param.path}`)
+        throw new Error(`param tensor has already been disposed for path ${param.path}`);
       }
-      param.tensor.dispose()
-    })
-    this._params = undefined
+      param.tensor.dispose();
+    });
+    this._params = undefined;
   }
 
   public serializeParams(): Float32Array {
     return new Float32Array(
       this.getParamList()
         .map(({ tensor }) => Array.from(tensor.dataSync()) as number[])
-        .reduce((flat, arr) => flat.concat(arr))
-    )
+        .reduce((flat, arr) => flat.concat(arr)),
+    );
   }
 
   public async load(weightsOrUrl: Float32Array | string | undefined): Promise<void> {
     if (weightsOrUrl instanceof Float32Array) {
-      this.extractWeights(weightsOrUrl)
-      return
+      this.extractWeights(weightsOrUrl);
+      return;
     }
 
-    await this.loadFromUri(weightsOrUrl)
+    await this.loadFromUri(weightsOrUrl);
   }
 
   public async loadFromUri(uri: string | undefined) {
     if (uri && typeof uri !== 'string') {
-      throw new Error(`${this._name}.loadFromUri - expected model uri`)
+      throw new Error(`${this._name}.loadFromUri - expected model uri`);
     }
 
-    const weightMap = await loadWeightMap(uri, this.getDefaultModelName())
-    this.loadFromWeightMap(weightMap)
+    const weightMap = await loadWeightMap(uri, this.getDefaultModelName());
+    this.loadFromWeightMap(weightMap);
   }
 
   public async loadFromDisk(filePath: string | undefined) {
     if (filePath && typeof filePath !== 'string') {
-      throw new Error(`${this._name}.loadFromDisk - expected model file path`)
+      throw new Error(`${this._name}.loadFromDisk - expected model file path`);
     }
 
-    const { readFile } = env.getEnv()
+    const { readFile } = env.getEnv();
 
-    const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName())
+    const { manifestUri, modelBaseUri } = getModelUris(filePath, this.getDefaultModelName());
 
     const fetchWeightsFromDisk = (filePaths: string[]) => Promise.all(
-      filePaths.map(filePath => readFile(filePath).then(buf => buf.buffer))
-    )
-    const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk)
-    const manifest = JSON.parse((await readFile(manifestUri)).toString())
-    const weightMap = await loadWeights(manifest, modelBaseUri)
+      filePaths.map((fp) => readFile(fp).then((buf) => buf.buffer)),
+    );
+    const loadWeights = tf.io.weightsLoaderFactory(fetchWeightsFromDisk);
+    const manifest = JSON.parse((await readFile(manifestUri)).toString());
+    const weightMap = await loadWeights(manifest, modelBaseUri);
 
-    this.loadFromWeightMap(weightMap)
+    this.loadFromWeightMap(weightMap);
   }
 
   public loadFromWeightMap(weightMap: tf.NamedTensorMap) {
     const {
       paramMappings,
-      params
-    } = this.extractParamsFromWeigthMap(weightMap)
+      params,
+    } = this.extractParamsFromWeigthMap(weightMap);
 
-    this._paramMappings = paramMappings
-    this._params = params
+    this._paramMappings = paramMappings;
+    this._params = params;
   }
 
   public extractWeights(weights: Float32Array) {
     const {
       paramMappings,
-      params
-    } = this.extractParams(weights)
+      params,
+    } = this.extractParams(weights);
 
-    this._paramMappings = paramMappings
-    this._params = params
+    this._paramMappings = paramMappings;
+    this._params = params;
   }
 
   private traversePropertyPath(paramPath: string) {
     if (!this.params) {
-      throw new Error(`traversePropertyPath - model has no loaded params`)
+      throw new Error('traversePropertyPath - model has no loaded params');
     }
 
     const result = paramPath.split('/').reduce((res: { nextObj: any, obj?: any, objProp?: string }, objProp) => {
       // eslint-disable-next-line no-prototype-builtins
       if (!res.nextObj.hasOwnProperty(objProp)) {
-        throw new Error(`traversePropertyPath - object does not have property ${objProp}, for path ${paramPath}`)
+        throw new Error(`traversePropertyPath - object does not have property ${objProp}, for path ${paramPath}`);
       }
 
-      return { obj: res.nextObj, objProp, nextObj: res.nextObj[objProp] }
-    }, { nextObj: this.params })
+      return { obj: res.nextObj, objProp, nextObj: res.nextObj[objProp] };
+    }, { nextObj: this.params });
 
-    const { obj, objProp } = result
+    const { obj, objProp } = result;
     if (!obj || !objProp || !(obj[objProp] instanceof tf.Tensor)) {
-      throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`)
+      throw new Error(`traversePropertyPath - parameter is not a tensor, for path ${paramPath}`);
     }
 
-    return { obj, objProp }
+    return { obj, objProp };
   }
 
   protected abstract getDefaultModelName(): string
+
+  // eslint-disable-next-line no-unused-vars
   protected abstract extractParamsFromWeigthMap(weightMap: tf.NamedTensorMap): { params: TNetParams, paramMappings: ParamMapping[] }
+
+  // eslint-disable-next-line no-unused-vars
   protected abstract extractParams(weights: Float32Array): { params: TNetParams, paramMappings: ParamMapping[] }
-}
+}
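
The load path touched above is normally exercised through one of the concrete nets rather than through NeuralNetwork directly. A minimal, hedged sketch of that usage (the 'model' folder name and the build path are assumptions borrowed from the demos, not part of this commit):

// illustrative only: drives NeuralNetwork.loadFromDisk()/loadFromUri() via the ssdMobilenetv1 subclass
import * as faceapi from '../dist/face-api.esm.js';

async function loadDetector() {
  // in node, loadFromDisk reads the weights manifest through env.getEnv().readFile as shown in the diff
  await faceapi.nets.ssdMobilenetv1.loadFromDisk('model');
  // in a browser the equivalent would be: await faceapi.nets.ssdMobilenetv1.loadFromUri('/model');
  console.log('detector loaded:', faceapi.nets.ssdMobilenetv1.isLoaded);
}

loadDetector();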
@@ -1,6 +1,7 @@
 export class PlatformBrowser {
   private textEncoder: TextEncoder;
 
+  // eslint-disable-next-line no-undef
   fetch(path: string, init?: RequestInit): Promise<Response> {
     return fetch(path, init);
   }
@@ -11,14 +12,14 @@ export class PlatformBrowser
+
   encode(text: string, encoding: string): Uint8Array {
     if (encoding !== 'utf-8' && encoding !== 'utf8') {
-      throw new Error(
-          `Browser's encoder only supports utf-8, but got ${encoding}`);
+      throw new Error(`Browser's encoder only supports utf-8, but got ${encoding}`);
     }
     if (this.textEncoder == null) {
       this.textEncoder = new TextEncoder();
     }
     return this.textEncoder.encode(text);
   }
 
   decode(bytes: Uint8Array, encoding: string): string {
     return new TextDecoder(encoding).decode(bytes);
   }
@@ -1,13 +1,12 @@
 export function euclideanDistance(arr1: number[] | Float32Array, arr2: number[] | Float32Array) {
-  if (arr1.length !== arr2.length)
-    throw new Error('euclideanDistance: arr1.length !== arr2.length')
+  if (arr1.length !== arr2.length) throw new Error('euclideanDistance: arr1.length !== arr2.length');
 
-  const desc1 = Array.from(arr1)
-  const desc2 = Array.from(arr2)
+  const desc1 = Array.from(arr1);
+  const desc2 = Array.from(arr2);
 
   return Math.sqrt(
     desc1
       .map((val, i) => val - desc2[i])
-      .reduce((res, diff) => res + Math.pow(diff, 2), 0)
-  )
-}
+      .reduce((res, diff) => res + (diff ** 2), 0),
+  );
+}
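
For reference, the helper changed above computes a plain L2 distance between two descriptors. A small hedged usage sketch (the exported faceapi.euclideanDistance wrapper and the 0.6 match threshold are the commonly documented usage, not something introduced by this commit):

import * as faceapi from '../dist/face-api.esm.js';

const a = [0, 3, 0];
const b = [4, 0, 0];
// sqrt((0 - 4)^2 + (3 - 0)^2 + (0 - 0)^2) = sqrt(25) = 5
const distance = faceapi.euclideanDistance(a, b);
console.log(distance); // 5
// face descriptors are usually treated as a match when distance < 0.6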
@@ -5,31 +5,30 @@ import { extendWithFaceDetection, isWithFaceDetection } from './factories/WithFa
 import { extendWithFaceLandmarks, isWithFaceLandmarks } from './factories/WithFaceLandmarks';
 
 export function resizeResults<T>(results: T, dimensions: IDimensions): T {
-
-  const { width, height } = new Dimensions(dimensions.width, dimensions.height)
+  const { width, height } = new Dimensions(dimensions.width, dimensions.height);
 
   if (width <= 0 || height <= 0) {
-    throw new Error(`resizeResults - invalid dimensions: ${JSON.stringify({ width, height })}`)
+    throw new Error(`resizeResults - invalid dimensions: ${JSON.stringify({ width, height })}`);
   }
 
   if (Array.isArray(results)) {
     // return results.map(obj => resizeResults(obj, { width, height })) as any as T
-    return (results as Array<any>).map(obj => resizeResults(obj, { width, height } as IDimensions)) as any as T
+    return (results as Array<any>).map((obj) => resizeResults(obj, { width, height } as IDimensions)) as any as T;
   }
 
   if (isWithFaceLandmarks(results)) {
-    const resizedDetection = results.detection.forSize(width, height)
-    const resizedLandmarks = results.unshiftedLandmarks.forSize(resizedDetection.box.width, resizedDetection.box.height)
-    return extendWithFaceLandmarks(extendWithFaceDetection(results, resizedDetection), resizedLandmarks)
+    const resizedDetection = results.detection.forSize(width, height);
+    const resizedLandmarks = results.unshiftedLandmarks.forSize(resizedDetection.box.width, resizedDetection.box.height);
+    return extendWithFaceLandmarks(extendWithFaceDetection(results, resizedDetection), resizedLandmarks);
   }
 
   if (isWithFaceDetection(results)) {
-    return extendWithFaceDetection(results, results.detection.forSize(width, height))
+    return extendWithFaceDetection(results, results.detection.forSize(width, height));
   }
 
   if (results instanceof FaceLandmarks || results instanceof FaceDetection) {
-    return (results as any).forSize(width, height)
+    return (results as any).forSize(width, height);
  }
 
-  return results
-}
+  return results;
+}