diff --git a/.eslintrc.json b/.eslintrc.json
new file mode 100644
index 0000000..1ab56dc
--- /dev/null
+++ b/.eslintrc.json
@@ -0,0 +1,26 @@
+{
+  "globals": {},
+  "env": {
+    "commonjs": true,
+    "node": true,
+    "es2020": true
+  },
+  "parserOptions": {
+    "ecmaVersion": 2020
+  },
+  "extends": [
+    "airbnb-base",
+    "eslint:recommended",
+    "plugin:node/recommended",
+    "plugin:import/errors",
+    "plugin:import/warnings"
+  ],
+  "rules": {
+    "max-len": [1, 275, 3],
+    "no-restricted-syntax": "off",
+    "no-await-in-loop": "off",
+    "no-continue": "off",
+    "node/no-unpublished-require": "off",
+    "radix": "off"
+  }
+}
diff --git a/.gitignore b/.gitignore
index ee89780..e15093f 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,3 @@
 node_modules
+human-service-account.json
 pnpm-lock.yaml
diff --git a/gcp-upload.js b/gcp-upload.js
new file mode 100644
index 0000000..0a10925
--- /dev/null
+++ b/gcp-upload.js
@@ -0,0 +1,62 @@
+/**
+ * This is an internal-only script that uploads all models to the Google Cloud Storage bucket
+
+*/
+
+/* GCP bucket auth init
+   gcloud iam service-accounts create human-storage-admin
+   gcloud projects add-iam-policy-binding protean-keyword-350712 --member="serviceAccount:human-storage-admin@protean-keyword-350712.iam.gserviceaccount.com" --role=roles/storage.admin
+   gcloud iam service-accounts keys create human-service-account.json --iam-account=human-storage-admin@protean-keyword-350712.iam.gserviceaccount.com
+*/
+
+const fs = require('fs');
+const path = require('path');
+const log = require('@vladmandic/pilogger');
+const { Storage } = require('@google-cloud/storage');
+const authJson = require('./human-service-account.json');
+
+const localDir = './models';
+const keyFilename = './human-service-account.json';
+const bucketName = 'human-models';
+
+async function main() {
+  log.headerJson();
+  const storage = new Storage({ projectId: authJson.project_id, keyFilename });
+  const [buckets] = await storage.getBuckets();
+  let bucket = buckets.find((b) => b.name === bucketName);
+  let bucketMetadata = {};
+  if (!bucket) {
+    [bucket, bucketMetadata] = await storage.createBucket(bucketName);
+    await storage.bucket(bucketName).makePublic();
+  } else {
+    [bucketMetadata] = await storage.bucket(bucketName).getMetadata();
+  }
+  log.data('bucket metadata:', bucketMetadata);
+  let [bucketFiles] = await storage.bucket(bucketName).getFiles();
+  const dir = fs.readdirSync(localDir);
+  log.state('enumerating:', { folder: localDir, files: dir.length });
+  for (const f of dir) {
+    // if (f !== 'README.md') continue;
+    const p = path.join(localDir, f);
+    const stat = fs.statSync(p);
+    let bucketFile = bucketFiles.find((each) => each.name === f);
+    if (bucketFile?.metadata?.metadata?.ctimeMs === stat.ctimeMs.toString()) {
+      log.data('exists:', { file: p, url: `https://storage.googleapis.com/${bucketName}/${f}`, size: bucketFile.metadata.size }); // link: uploaded.metadata.mediaLink
+    } else {
+      [bucketFile] = await storage.bucket(bucketName).upload(path.join(localDir, f), { destination: f, gzip: true, public: true });
+      await bucketFile.setMetadata({ metadata: { ctimeMs: stat.ctimeMs.toString() } });
+      log.data('upload:', { file: p, url: `https://storage.googleapis.com/${bucketName}/${f}` });
+    }
+  }
+  for (const f of bucketFiles) {
+    if (!dir.includes(f.name)) {
+      await f.delete();
+      log.data('delete', f.name);
+    }
+  }
+  [bucketFiles] = await storage.bucket(bucketName).getFiles();
+  const totalSize = bucketFiles.reduce((prev, curr) => prev + parseInt(curr.metadata.size), 0);
+  log.info('bucket info:', { files: bucketFiles.length, size: totalSize });
+}
+
+main();
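Note on the upload logic above: the script skips re-uploading a model when the local file's ctimeMs matches a custom metadata entry previously stored on the bucket object. In @google-cloud/storage, custom key/value pairs written with setMetadata({ metadata: { ... } }) are returned under the nested metadata.metadata property, which is why the comparison reads bucketFile.metadata.metadata.ctimeMs. A minimal sketch of reading that value back, assuming the bucket name and key file used above (readCustomMetadata is a hypothetical helper, not part of this change):

const { Storage } = require('@google-cloud/storage');

async function readCustomMetadata(fileName) {
  const storage = new Storage({ keyFilename: './human-service-account.json' });
  // getMetadata() resolves to [metadata, apiResponse]; custom key/value pairs sit under the nested metadata.metadata object
  const [metadata] = await storage.bucket('human-models').file(fileName).getMetadata();
  return metadata.metadata ? metadata.metadata.ctimeMs : undefined;
}

readCustomMetadata('README.md').then((ctimeMs) => console.log({ ctimeMs }));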
diff --git a/models/faceboxes.ts b/models/faceboxes.ts
deleted file mode 100644
index 94be524..0000000
--- a/models/faceboxes.ts
+++ /dev/null
@@ -1,69 +0,0 @@
-import { log } from '../log';
-import * as tf from '../../dist/tfjs.esm.js';
-import * as profile from '../profile';
-
-export class FaceBoxes {
-  enlarge: number;
-  model: any;
-  config: any;
-
-  constructor(model, config) {
-    this.enlarge = 1.1;
-    this.model = model;
-    this.config = config;
-  }
-
-  async estimateFaces(input, config) {
-    if (config) this.config = config;
-    const results: Array<{ confidence: number, box: any, boxRaw: any, image: any }> = [];
-    const resizeT = tf.image.resizeBilinear(input, [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
-    const castT = resizeT.toInt();
-    let scores;
-    let boxes;
-    if (!config.profile) {
-      const [scoresT, boxesT, numT] = await this.model.executeAsync(castT);
-      scores = scoresT.dataSync();
-      const squeezeT = boxesT.squeeze();
-      boxes = squeezeT.arraySync();
-      scoresT.dispose();
-      boxesT.dispose();
-      squeezeT.dispose();
-      numT.dispose();
-    } else {
-      const profileData = await tf.profile(() => this.model.executeAsync(castT));
-      scores = profileData.result[0].dataSync();
-      const squeezeT = profileData.result[1].squeeze();
-      boxes = squeezeT.arraySync();
-      profileData.result.forEach((t) => t.dispose());
-      profile.run('faceboxes', profileData);
-    }
-    castT.dispose();
-    resizeT.dispose();
-    for (const i in boxes) {
-      if (scores[i] && scores[i] > this.config.face.detector.minConfidence) {
-        const crop = [boxes[i][0] / this.enlarge, boxes[i][1] / this.enlarge, boxes[i][2] * this.enlarge, boxes[i][3] * this.enlarge];
-        const boxRaw = [crop[1], crop[0], (crop[3]) - (crop[1]), (crop[2]) - (crop[0])];
-        const box = [
-          parseInt((boxRaw[0] * input.shape[2]).toString()),
-          parseInt((boxRaw[1] * input.shape[1]).toString()),
-          parseInt((boxRaw[2] * input.shape[2]).toString()),
-          parseInt((boxRaw[3] * input.shape[1]).toString())];
-        const resized = tf.image.cropAndResize(input, [crop], [0], [this.config.face.detector.inputSize, this.config.face.detector.inputSize]);
-        const image = resized.div([255]);
-        resized.dispose();
-        results.push({ confidence: scores[i], box, boxRaw, image });
-        // add mesh, meshRaw, annotations,
-      }
-    }
-    return results;
-  }
-}
-
-export async function load(config) {
-  const model = await tf.loadGraphModel(config.face.detector.modelPath);
-  if (config.debug) log(`load model: ${config.face.detector.modelPath.match(/\/(.*)\./)[1]}`);
-  const faceboxes = new FaceBoxes(model, config);
-  if (config.face.mesh.enabled && config.debug) log(`load model: ${config.face.mesh.modelPath.match(/\/(.*)\./)[1]}`);
-  if (config.face.iris.enabled && config.debug) log(`load model: ${config.face.iris.modelPath.match(/\/(.*)\./)[1]}`);
-  return faceboxes;
-}
diff --git a/models/faceboxes.txt b/models/faceboxes.txt
deleted file mode 100644
index 58e9c23..0000000
--- a/models/faceboxes.txt
+++ /dev/null
@@ -1 +0,0 @@
-https://github.com/TropComplique/FaceBoxes-tensorflow
"git+https://github.com/vladmandic/human-models.git" @@ -12,18 +13,21 @@ "url": "https://github.com/vladmandic/human-models/issues" }, "homepage": "https://github.com/vladmandic/human-models#readme", - "scripts": {}, + "scripts": { + "start": "node gcp-upload.js" + }, "keywords": [ "human", "human-library", - "face-detection", - "body-tracking", - "hand-tracking", - "tfjs-models", - "tfjs", - "tensorflowjs" + "tfjs-models" ], "dependencies": { - "@vladmandic/human": "^2.7.3" + "@vladmandic/human": "^2.7.3", + "@vladmandic/pilogger": "^0.4.4", + "@google-cloud/storage": "^6.0.1", + "eslint-plugin-import": "^2.26.0", + "eslint": "^8.16.0", + "eslint-config-airbnb-base": "^15.0.0", + "eslint-plugin-node": "^11.1.0" } }