add gcp upload

main
Vladimir Mandic 2022-05-30 08:59:10 -04:00
parent 189fdb7b6e
commit 21e292890f
6 changed files with 102 additions and 79 deletions

26
.eslintrc.json Normal file
View File

@ -0,0 +1,26 @@
{
"globals": {},
"env": {
"commonjs": true,
"node": true,
"es2020": true
},
"parserOptions": {
"ecmaVersion": 2020
},
"extends": [
"airbnb-base",
"eslint:recommended",
"plugin:node/recommended",
"plugin:import/errors",
"plugin:import/warnings"
],
"rules": {
"max-len": [1, 275, 3],
"no-restricted-syntax": "off",
"no-await-in-loop": "off",
"no-continue": "off",
"node/no-unpublished-require": "off",
"radix": "off"
}
}

1
.gitignore vendored
View File

@ -1,2 +1,3 @@
node_modules
human-service-account.json
pnpm-lock.yaml

62
gcp-upload.js Normal file
View File

@ -0,0 +1,62 @@
/**
* This is an internal-only script that uploads all models to a Google Cloud Storage bucket
*
*/
/* GCP bucket auth init
gcloud iam service-accounts create human-storage-admin
gcloud projects add-iam-policy-binding protean-keyword-350712 --member="serviceAccount:human-storage-admin@protean-keyword-350712.iam.gserviceaccount.com" --role=roles/storage.admin
gcloud iam service-accounts keys create human-service-account.json --iam-account=human-storage-admin@protean-keyword-350712.iam.gserviceaccount.com
*/
const fs = require('fs');
const path = require('path');
const log = require('@vladmandic/pilogger');
const { Storage } = require('@google-cloud/storage');
const authJson = require('./human-service-account.json');
const localDir = './models';
const keyFilename = './human-service-account.json';
const bucketName = 'human-models';
async function main() {
log.headerJson();
const storage = new Storage({ projectId: authJson.project_id, keyFilename });
const [buckets] = await storage.getBuckets();
let bucket = buckets.find((b) => b.name === bucketName);
let bucketMetadata = {};
if (!bucket) {
[bucket, bucketMetadata] = await storage.createBucket(bucketName);
await storage.bucket(bucketName).makePublic();
} else {
[bucketMetadata] = await storage.bucket(bucketName).getMetadata();
}
log.data('bucket metadata:', bucketMetadata);
let [bucketFiles] = await storage.bucket(bucketName).getFiles();
const dir = fs.readdirSync(localDir);
log.state('enumerating:', { folder: localDir, files: dir.length });
for (const f of dir) {
// if (f !== 'README.md') continue;
const p = path.join(localDir, f);
const stat = fs.statSync(p);
let bucketFile = bucketFiles.find((each) => each.name === f);
if (bucketFile?.metadata?.metadata?.ctimeMs === stat.ctimeMs.toString()) {
log.data('exists:', { file: p, url: `https://storage.googleapis.com/${bucketName}/${f}`, size: bucketFile.metadata.size }); // link: uploaded.metadata.mediaLink
} else {
[bucketFile] = await storage.bucket(bucketName).upload(path.join(localDir, f), { destination: f, gzip: true, public: true });
await bucketFile.setMetadata({ metadata: { ctimeMs: stat.ctimeMs.toString() } });
log.data('upload:', { file: p, url: `https://storage.googleapis.com/${bucketName}/${f}` });
}
}
for (const f of bucketFiles) {
if (!dir.includes(f.name)) {
await f.delete();
log.data('delete', f.name);
}
}
[bucketFiles] = await storage.bucket(bucketName).getFiles();
const totalSize = bucketFiles.reduce((prev, curr) => prev + parseInt(curr.metadata.size), 0);
log.info('bucket info:', { files: bucketFiles.length, size: totalSize });
}
main();

View File

@ -1,69 +0,0 @@
import { log } from '../log';
import * as tf from '../../dist/tfjs.esm.js';
import * as profile from '../profile';
export class FaceBoxes {
enlarge: number;
model: any;
config: any;
constructor(model, config) {
this.enlarge = 1.1;
this.model = model;
this.config = config;
}
async estimateFaces(input, config) {
if (config) this.config = config;
const results: Array<{ confidence: number, box: any, boxRaw: any, image: any }> = [];
const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
const castT = resizeT.toInt();
let scores;
let boxes;
if (!config.profile) {
const [scoresT, boxesT, numT] = await this.model.executeAsync(castT);
scores = scoresT.dataSync();
const squeezeT = boxesT.squeeze();
boxes = squeezeT.arraySync();
scoresT.dispose();
boxesT.dispose();
squeezeT.dispose();
numT.dispose();
} else {
const profileData = await tf.profile(() => this.model.executeAsync(castT));
scores = profileData.result[0].dataSync();
const squeezeT = profileData.result[1].squeeze();
boxes = squeezeT.arraySync();
profileData.result.forEach((t) => t.dispose());
profile.run('faceboxes', profileData);
}
castT.dispose();
resizeT.dispose();
for (const i in boxes) {
if (scores[i] && scores[i] > this.config.face.detector.minConfidence) {
const crop = [boxes[i][0] / this.enlarge, boxes[i][1] / this.enlarge, boxes[i][2] * this.enlarge, boxes[i][3] * this.enlarge];
const boxRaw = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
const box = [
parseInt((boxRaw[0] * input.shape[2]).toString()),
parseInt((boxRaw[1] * input.shape[1]).toString()),
parseInt((boxRaw[2] * input.shape[2]).toString()),
parseInt((boxRaw[3] * input.shape[1]).toString())];
const resized = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
const image = resized.div([255]);
resized.dispose();
results.push({ confidence: scores[i], box, boxRaw, image });
// add mesh, meshRaw, annotations,
}
}
return results;
}
}
export async function load(config) {
const model = await tf.loadGraphModel(config.face.detector.modelPath);
if (config.debug) log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
const faceboxes = new FaceBoxes(model, config);
if (config.face.mesh.enabled && config.debug) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
if (config.face.iris.enabled && config.debug) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
return faceboxes;
}

View File

@ -1 +0,0 @@
https://github.com/TropComplique/FaceBoxes-tensorflow

View File

@ -1,9 +1,10 @@
{
"name": "@vladmandic/human-models",
"description": "Repository for optional models not included in the default Human library",
"description": "Repository for Human models",
"license": "MIT",
"author": "Vladimir Mandic <mandic00@live.com>",
"version": "2.8.0",
"main": "gcp-upload.js",
"repository": {
"type": "git",
"url": "git+https://github.com/vladmandic/human-models.git"
@ -12,18 +13,21 @@
"url": "https://github.com/vladmandic/human-models/issues"
},
"homepage": "https://github.com/vladmandic/human-models#readme",
"scripts": {},
"scripts": {
"start": "node gcp-upload.js"
},
"keywords": [
"human",
"human-library",
"face-detection",
"body-tracking",
"hand-tracking",
"tfjs-models",
"tfjs",
"tensorflowjs"
"tfjs-models"
],
"dependencies": {
"@vladmandic/human": "^2.7.3"
"@vladmandic/human": "^2.7.3",
"@vladmandic/pilogger": "^0.4.4",
"@google-cloud/storage": "^6.0.1",
"eslint-plugin-import": "^2.26.0",
"eslint": "^8.16.0",
"eslint-config-airbnb-base": "^15.0.0",
"eslint-plugin-node": "^11.1.0"
}
}