implemented image filters

pull/50/head
Vladimir Mandic 2020-10-18 12:12:09 -04:00
parent b146a0a64e
commit c571994d50
21 changed files with 2390 additions and 761 deletions

View File

@@ -223,6 +223,23 @@ config = {
scoped: false, // enable scoped runs
// some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
// typically not needed
filter: {
enabled: true, // enable image pre-processing filters
return: true, // return processed canvas imagedata in result
brightness: 0, // range: -1 (darken) to 1 (lighten)
contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)
sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)
blur: 0, // range: 0 (no blur) to N (blur radius in pixels)
saturation: 0, // range: -1 (reduce saturation) to 1 (increase saturation)
hue: 0, // range: 0 (no change) to 360 (hue rotation in degrees)
negative: false, // image negative
sepia: false, // image sepia colors
vintage: false, // image vintage colors
kodachrome: false, // image kodachrome colors
technicolor: false, // image technicolor colors
polaroid: false, // image polaroid camera effect
pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)
},
face: {
enabled: true, // controls if specified module is enabled
// face.enabled is required for all face models: detector, mesh, iris, age, gender, emotion
@@ -352,6 +369,7 @@ result = {
backend, // time to initialize tf backend
load, // time to load models
sanity, // time for input verification
image, // time for image processing
body, // model time
hand, // model time
face, // model time
@@ -416,5 +434,6 @@ Library can also be used on mobile devices
- Body Pose Detection: [**PoseNet**](https://medium.com/tensorflow/real-time-human-pose-estimation-in-the-browser-with-tensorflow-js-7dd0bc881cd5)
- Age & Gender Prediction: [**SSR-Net**](https://github.com/shamangary/SSR-Net)
- Emotion Prediction: [**Oarriaga**](https://github.com/oarriaga/face_classification)
- Image Filters: [**WebGLImageFilter**](https://github.com/phoboslab/WebGLImageFilter)
<hr>
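For orientation, here is a minimal usage sketch of the new filter options (assuming an existing `video` element and the ESM bundle; the config keys are the ones documented above):

```js
import human from './dist/human.esm.js';

const config = {
  filter: { enabled: true, return: true, brightness: 0.2, saturation: -0.5, pixelate: 8 },
};

async function run(video) {
  const result = await human.detect(video, config);
  // with filter.return enabled, result.canvas holds the pre-processed frame,
  // and result.performance.image reports the time spent on filtering
  console.log(result.performance.image, result.canvas);
}
```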

View File

@@ -7,6 +7,23 @@ export default {
scoped: false, // enable scoped runs
// some models *may* have memory leaks, this wraps everything in a local scope at a cost of performance
// typically not needed
filter: {
enabled: true, // enable image pre-processing filters
return: true, // return processed canvas imagedata in result
brightness: 0, // range: -1 (darken) to 1 (lighten)
contrast: 0, // range: -1 (reduce contrast) to 1 (increase contrast)
sharpness: 0, // range: 0 (no sharpening) to 1 (maximum sharpening)
blur: 0, // range: 0 (no blur) to N (blur radius in pixels)
saturation: 0, // range: -1 (reduce saturation) to 1 (increase saturation)
hue: 0, // range: 0 (no change) to 360 (hue rotation in degrees)
negative: false, // image negative
sepia: false, // image sepia colors
vintage: false, // image vintage colors
kodachrome: false, // image kodachrome colors
technicolor: false, // image technicolor colors
polaroid: false, // image polaroid camera effect
pixelate: 0, // range: 0 (no pixelate) to N (number of pixels to pixelate)
},
face: {
enabled: true, // controls if specified module is enabled
// face.enabled is required for all face models: detector, mesh, iris, age, gender, emotion

View File

@@ -27,6 +27,7 @@ const ui = {
// configuration overrides
const config = {
backend: 'webgl', // if you want to use 'wasm' backend, enable script load of tf and tf-backend-wasm in index.html
filter: { enabled: true, brightness: 0, contrast: 0, sharpness: 0, blur: 0, saturation: 0, hue: 0, negative: false, sepia: false, vintage: false, kodachrome: false, technicolor: false, polaroid: false, pixelate: 0 },
face: {
enabled: true,
detector: { maxFaces: 10, skipFrames: 10, minConfidence: 0.5, iouThreshold: 0.3, scoreThreshold: 0.7 },
@@ -42,6 +43,7 @@ const config = {
// global variables
let menu;
let menuFX;
let worker;
let timeStamp;
const fps = [];
@@ -75,7 +77,8 @@ function drawResults(input, result, canvas) {
// draw image from video
const ctx = canvas.getContext('2d');
-ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
if (result.canvas) ctx.drawImage(result.canvas, 0, 0, result.canvas.width, result.canvas.height, 0, 0, canvas.width, canvas.height);
else ctx.drawImage(input, 0, 0, input.width, input.height, 0, 0, canvas.width, canvas.height);
// draw all results
draw.face(result.face, canvas, ui, human.facemesh.triangulation);
draw.body(result.body, canvas, ui);
@@ -193,7 +196,7 @@ async function processImage(input) {
const result = await human.detect(image, config);
drawResults(image, result, canvas);
const thumb = document.createElement('canvas');
-thumb.width = (window.innerWidth - menu.width) / (ui.columns + 0.1);
thumb.width = window.innerWidth / (ui.columns + 0.1);
thumb.height = canvas.height / (window.innerWidth / thumb.width);
thumb.style.margin = '8px';
thumb.style.boxShadow = '4px 4px 4px 0 dimgrey';
@@ -237,11 +240,12 @@ async function detectSampleImages() {
}
function setupMenu() {
-menu = new Menu(document.body);
menu = new Menu(document.body, '...', { top: '1rem', right: '1rem' });
-menu.addTitle('...');
menu.addButton('Start Video', 'Pause Video', (evt) => detectVideo(evt));
menu.addButton('Process Images', 'Process Images', () => detectSampleImages());
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addBool('Use Web Worker', ui, 'useWorker');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addLabel('Enabled Models');
menu.addBool('Face Detect', config.face, 'enabled');
@@ -281,18 +285,33 @@ function setupMenu() {
config.hand.iouThreshold = parseFloat(val);
});
-menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
-menu.addLabel('UI Options');
-menu.addBool('Use Web Worker', ui, 'useWorker');
-menu.addBool('Camera Front/Back', ui, 'facing', () => setupCamera());
-menu.addBool('Use 3D Depth', ui, 'useDepth');
-menu.addBool('Draw Boxes', ui, 'drawBoxes');
-menu.addBool('Draw Points', ui, 'drawPoints');
-menu.addBool('Draw Polygons', ui, 'drawPolygons');
-menu.addBool('Fill Polygons', ui, 'fillPolygons');
menu.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menu.addChart('FPS', 'FPS');
menuFX = new Menu(document.body, '...', { top: '1rem', right: '18rem' });
menuFX.addLabel('UI Options');
menuFX.addBool('Camera Front/Back', ui, 'facing', () => setupCamera());
menuFX.addBool('Use 3D Depth', ui, 'useDepth');
menuFX.addBool('Draw Boxes', ui, 'drawBoxes');
menuFX.addBool('Draw Points', ui, 'drawPoints');
menuFX.addBool('Draw Polygons', ui, 'drawPolygons');
menuFX.addBool('Fill Polygons', ui, 'fillPolygons');
menuFX.addHTML('<hr style="min-width: 200px; border-style: inset; border-color: dimgray">');
menuFX.addLabel('Image Filters');
menuFX.addBool('Enabled', config.filter, 'enabled');
menuFX.addRange('Brightness', config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => config.filter.brightness = parseFloat(val));
menuFX.addRange('Contrast', config.filter, 'contrast', -1.0, 1.0, 0.05, (val) => config.filter.contrast = parseFloat(val));
menuFX.addRange('Sharpness', config.filter, 'sharpness', 0, 1.0, 0.05, (val) => config.filter.sharpness = parseFloat(val));
menuFX.addRange('Blur', config.filter, 'blur', 0, 20, 1, (val) => config.filter.blur = parseInt(val));
menuFX.addRange('Saturation', config.filter, 'saturation', -1.0, 1.0, 0.05, (val) => config.filter.saturation = parseFloat(val));
menuFX.addRange('Hue', config.filter, 'hue', 0, 360, 5, (val) => config.filter.hue = parseInt(val));
menuFX.addRange('Pixelate', config.filter, 'pixelate', 0, 32, 1, (val) => config.filter.pixelate = parseInt(val));
menuFX.addBool('Negative', config.filter, 'negative');
menuFX.addBool('Sepia', config.filter, 'sepia');
menuFX.addBool('Vintage', config.filter, 'vintage');
menuFX.addBool('Kodachrome', config.filter, 'kodachrome');
menuFX.addBool('Technicolor', config.filter, 'technicolor');
menuFX.addBool('Polaroid', config.filter, 'polaroid');
}
async function main() {

View File

@@ -1,7 +1,12 @@
let instance = 0;
const css = `
-.menu-container { display: block; background: darkslategray; position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); box-shadow: 0 0 8px dimgrey; }
.menu { position: fixed; top: 0rem; right: 0; width: fit-content; padding: 0 0.8rem 0 0.8rem; line-height: 1.8rem; z-index: 10; max-height: calc(100% - 4rem); box-shadow: 0 0 8px dimgrey; background: darkslategray; }
-.menu-container:hover { box-shadow: 0 0 8px lightgrey; }
.menu:hover { box-shadow: 0 0 8px lightgrey; }
-.menu { display: flex; white-space: nowrap; background: darkslategray; padding: 0.2rem; width: max-content; }
.menu-container { display: block; max-height: 100vh; }
.menu-container-fadeout { max-height: 0; overflow: hidden; transition: max-height, 0.5s ease; }
.menu-container-fadein { max-height: 100vh; overflow: hidden; transition: max-height, 0.5s ease; }
.menu-item { display: flex; white-space: nowrap; background: darkslategray; padding: 0.2rem; width: max-content; }
.menu-title { text-align: right; cursor: pointer; }
.menu-hr { margin: 0.2rem; border: 1px solid rgba(0, 0, 0, 0.5) }
.menu-label { padding: 0; }
@@ -33,31 +38,54 @@ function createCSS() {
document.getElementsByTagName('head')[0].appendChild(el);
}
-function createElem(parent) {
function createMenu(parent, title, position = { top: null, left: null, bottom: null, right: null }) {
const el = document.createElement('div');
-el.id = 'menu';
el.id = `menu-${instance}`;
-el.className = 'menu-container';
el.className = 'menu';
if (position) {
if (position.top) el.style.top = position.top;
if (position.bottom) el.style.bottom = position.bottom;
if (position.left) el.style.left = position.left;
if (position.right) el.style.right = position.right;
}
const elContainer = document.createElement('div');
elContainer.id = `menu-container-${instance}`;
elContainer.className = 'menu-container menu-container-fadein';
const elTitle = document.createElement('div');
elTitle.className = 'menu-title';
elTitle.id = `menu-title-${instance}`;
elTitle.innerHTML = title;
el.appendChild(elTitle);
elTitle.addEventListener('click', () => {
elContainer.classList.toggle('menu-container-fadeout');
elContainer.classList.toggle('menu-container-fadein');
});
el.appendChild(elContainer);
if (typeof parent === 'object') parent.appendChild(el);
else document.getElementById(parent).appendChild(el);
-return el;
return [el, elContainer];
}
class Menu {
-constructor(parent) {
constructor(parent, title, position) {
createCSS();
-this.menu = createElem(parent);
[this.menu, this.container] = createMenu(parent, title, position);
-this._id = 0;
this.id = 0;
this.instance = instance;
instance++;
this._maxFPS = 0;
this.hidden = 0;
}
get newID() {
-this._id++;
this.id++;
-return `menu-${this._id}`;
return `menu-${this.instance}-${this.id}`;
}
get ID() {
-return `menu-${this._id}`;
return `menu-${this.instance}-${this.id}`;
}
get width() {
@@ -77,23 +105,25 @@ class Menu {
el.addEventListener('click', () => {
this.hidden = !this.hidden;
const all = document.getElementsByClassName('menu');
-for (const item of all) item.style.display = this.hidden ? 'none' : 'flex';
for (const item of all) {
item.style.display = this.hidden ? 'none' : 'flex';
}
});
}
async addLabel(title) {
const el = document.createElement('div');
-el.className = 'menu menu-label';
el.className = 'menu-item menu-label';
el.id = this.newID;
el.innerHTML = title;
-this.menu.appendChild(el);
this.container.appendChild(el);
}
async addBool(title, object, variable, callback) {
const el = document.createElement('div');
-el.className = 'menu';
el.className = 'menu-item';
el.innerHTML = `<div class="menu-checkbox"><input class="menu-checkbox" type="checkbox" id="${this.newID}" ${object[variable] ? 'checked' : ''}/><label class="menu-checkbox-label" for="${this.ID}"></label></div>${title}`;
-this.menu.appendChild(el);
this.container.appendChild(el);
el.addEventListener('change', (evt) => {
object[variable] = evt.target.checked;
if (callback) callback(evt.target.checked);
@@ -102,9 +132,9 @@ class Menu {
async addRange(title, object, variable, min, max, step, callback) {
const el = document.createElement('div');
-el.className = 'menu';
el.className = 'menu-item';
el.innerHTML = `<input class="menu-range" type="range" id="${this.newID}" min="${min}" max="${max}" step="${step}" value="${object[variable]}">${title}`;
-this.menu.appendChild(el);
this.container.appendChild(el);
el.addEventListener('change', (evt) => {
object[variable] = evt.target.value;
evt.target.setAttribute('value', evt.target.value);
@@ -114,22 +144,22 @@ class Menu {
async addHTML(html) {
const el = document.createElement('div');
-el.className = 'menu';
el.className = 'menu-item';
el.id = this.newID;
if (html) el.innerHTML = html;
-this.menu.appendChild(el);
this.container.appendChild(el);
}
async addButton(titleOn, titleOff, callback) {
const el = document.createElement('button');
-el.className = 'menu menu-button';
el.className = 'menu-item menu-button';
el.style.fontFamily = document.body.style.fontFamily;
el.style.fontSize = document.body.style.fontSize;
el.style.fontVariant = document.body.style.fontVariant;
el.type = 'button';
el.id = this.newID;
el.innerText = titleOn;
-this.menu.appendChild(el);
this.container.appendChild(el);
el.addEventListener('click', () => {
if (el.innerText === titleOn) el.innerText = titleOff;
else el.innerText = titleOn;
@@ -139,10 +169,10 @@ class Menu {
async addValue(title, val) {
const el = document.createElement('div');
-el.className = 'menu';
el.className = 'menu-item';
el.id = title;
el.innerText = `${title}: ${val}`;
-this.menu.appendChild(el);
this.container.appendChild(el);
}
// eslint-disable-next-line class-methods-use-this
@@ -153,10 +183,10 @@ class Menu {
async addChart(title, id) {
const el = document.createElement('div');
-el.className = 'menu menu-chart-title';
el.className = 'menu-item menu-chart-title';
el.id = this.newID;
el.innerHTML = `${title}<canvas id="menu-canvas-${id}" class="menu-chart-canvas" width="180px" height="40px"></canvas>`;
-this.menu.appendChild(el);
this.container.appendChild(el);
}
// eslint-disable-next-line class-methods-use-this

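Taken together, the reworked Menu class now takes a title and an anchor position and renders its items into a collapsible container with per-instance element IDs. A short sketch of the new API as the demo above exercises it (the import path and titles are illustrative):

```js
import Menu from './menu.js';

const menu = new Menu(document.body, 'Models', { top: '1rem', right: '1rem' });
menu.addLabel('Enabled Models');
menu.addBool('Face Detect', config.face, 'enabled');

// a second, independently positioned menu is possible since IDs are namespaced per instance
const menuFX = new Menu(document.body, 'Filters', { top: '1rem', right: '18rem' });
menuFX.addRange('Brightness', config.filter, 'brightness', -1.0, 1.0, 0.05, (val) => config.filter.brightness = parseFloat(val));
```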
817
dist/human.cjs vendored
View File

@@ -4609,7 +4609,6 @@ var require_handdetector = __commonJS((exports2) => {
async getBoundingBoxes(input) {
const batchedPrediction = this.model.predict(input);
const prediction = batchedPrediction.squeeze();
-console.log(prediction);
const scores = tf2.tidy(() => tf2.sigmoid(tf2.slice(prediction, [0, 0], [-1, 1])).squeeze());
const rawBoxes = tf2.slice(prediction, [0, 1], [-1, 4]);
const boxes = this.normalizeBoxes(rawBoxes);
@@ -4966,6 +4965,717 @@ var require_handpose = __commonJS((exports2) => {
exports2.load = load2;
});
// src/imagefx.js
var require_imagefx = __commonJS((exports2) => {
const WebGLProgram = function(gl, vertexSource, fragmentSource) {
const _collect = function(source, prefix, collection) {
const r = new RegExp("\\b" + prefix + " \\w+ (\\w+)", "ig");
source.replace(r, (match, name) => {
collection[name] = 0;
return match;
});
};
const _compile = function(gl2, source, type) {
const shader = gl2.createShader(type);
gl2.shaderSource(shader, source);
gl2.compileShader(shader);
if (!gl2.getShaderParameter(shader, gl2.COMPILE_STATUS)) {
throw new Error("Filter: GL compile failed", gl2.getShaderInfoLog(shader));
}
return shader;
};
this.uniform = {};
this.attribute = {};
const _vsh = _compile(gl, vertexSource, gl.VERTEX_SHADER);
const _fsh = _compile(gl, fragmentSource, gl.FRAGMENT_SHADER);
this.id = gl.createProgram();
gl.attachShader(this.id, _vsh);
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
throw new Error("Filter: GL link failed", gl.getProgramInfoLog(this.id));
}
gl.useProgram(this.id);
_collect(vertexSource, "attribute", this.attribute);
for (const a in this.attribute) {
this.attribute[a] = gl.getAttribLocation(this.id, a);
}
_collect(vertexSource, "uniform", this.uniform);
_collect(fragmentSource, "uniform", this.uniform);
for (const u in this.uniform) {
this.uniform[u] = gl.getUniformLocation(this.id, u);
}
};
const WebGLImageFilter = function(params) {
if (!params)
params = {};
let _drawCount = 0;
let _sourceTexture = null;
let _lastInChain = false;
let _currentFramebufferIndex = -1;
let _tempFramebuffers = [null, null];
let _filterChain = [];
let _width = -1;
let _height = -1;
let _vertexBuffer = null;
let _currentProgram = null;
const _canvas = params.canvas || document.createElement("canvas");
const _shaderProgramCache = {};
const gl = _canvas.getContext("webgl") || _canvas.getContext("experimental-webgl");
if (!gl)
throw new Error("Filter: getContext() failed");
this.addFilter = function(name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
_filterChain.push({func: filter, args});
};
this.reset = function() {
_filterChain = [];
};
this.apply = function(image) {
_resize(image.width, image.height);
_drawCount = 0;
if (!_sourceTexture)
_sourceTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
if (_filterChain.length === 0) {
const program = _compileShader(SHADER.FRAGMENT_IDENTITY);
_draw();
return _canvas;
}
for (let i = 0; i < _filterChain.length; i++) {
_lastInChain = i === _filterChain.length - 1;
const f = _filterChain[i];
f.func.apply(this, f.args || []);
}
return _canvas;
};
const _resize = function(width, height) {
if (width === _width && height === _height) {
return;
}
_canvas.width = _width = width;
_canvas.height = _height = height;
if (!_vertexBuffer) {
const vertices = new Float32Array([
-1, -1, 0, 1,
1, -1, 1, 1,
-1, 1, 0, 0,
-1, 1, 0, 0,
1, -1, 1, 1,
1, 1, 1, 0
]);
_vertexBuffer = gl.createBuffer(), gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
}
gl.viewport(0, 0, _width, _height);
_tempFramebuffers = [null, null];
};
const _getTempFramebuffer = function(index) {
_tempFramebuffers[index] = _tempFramebuffers[index] || _createFramebufferTexture(_width, _height);
return _tempFramebuffers[index];
};
const _createFramebufferTexture = function(width, height) {
const fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
const renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return {fbo, texture};
};
const _draw = function(flags) {
let source = null;
let target = null;
let flipY = false;
if (_drawCount === 0) {
source = _sourceTexture;
} else {
source = _getTempFramebuffer(_currentFramebufferIndex).texture;
}
_drawCount++;
if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {
target = null;
flipY = _drawCount % 2 === 0;
} else {
_currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
target = _getTempFramebuffer(_currentFramebufferIndex).fbo;
}
gl.bindTexture(gl.TEXTURE_2D, source);
gl.bindFramebuffer(gl.FRAMEBUFFER, target);
gl.uniform1f(_currentProgram.uniform.flipY, flipY ? -1 : 1);
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
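// note on _draw() above: it ping-pongs between two temporary framebuffers so each
// filter in the chain reads the previous filter's output; only the last pass renders
// to the canvas (target = null), flipping Y when an even pass count would otherwise
// leave the image inverted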
const _compileShader = function(fragmentSource) {
if (_shaderProgramCache[fragmentSource]) {
_currentProgram = _shaderProgramCache[fragmentSource];
gl.useProgram(_currentProgram.id);
return _currentProgram;
}
_currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);
const floatSize = Float32Array.BYTES_PER_ELEMENT;
const vertSize = 4 * floatSize;
gl.enableVertexAttribArray(_currentProgram.attribute.pos);
gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);
gl.enableVertexAttribArray(_currentProgram.attribute.uv);
gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);
_shaderProgramCache[fragmentSource] = _currentProgram;
return _currentProgram;
};
let DRAW = {INTERMEDIATE: 1};
let SHADER = {};
SHADER.VERTEX_IDENTITY = [
"precision highp float;",
"attribute vec2 pos;",
"attribute vec2 uv;",
"varying vec2 vUv;",
"uniform float flipY;",
"void main(void) {",
"vUv = uv;",
"gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);",
"}"
].join("\n");
SHADER.FRAGMENT_IDENTITY = [
"precision highp float;",
"varying vec2 vUv;",
"uniform sampler2D texture;",
"void main(void) {",
"gl_FragColor = texture2D(texture, vUv);",
"}"
].join("\n");
let _filter = {};
_filter.colorMatrix = function(matrix) {
const m = new Float32Array(matrix);
m[4] /= 255;
m[9] /= 255;
m[14] /= 255;
m[19] /= 255;
const shader = m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0 ? _filter.colorMatrix.SHADER.WITHOUT_ALPHA : _filter.colorMatrix.SHADER.WITH_ALPHA;
const program = _compileShader(shader);
gl.uniform1fv(program.uniform.m, m);
_draw();
};
_filter.colorMatrix.SHADER = {};
_filter.colorMatrix.SHADER.WITH_ALPHA = [
"precision highp float;",
"varying vec2 vUv;",
"uniform sampler2D texture;",
"uniform float m[20];",
"void main(void) {",
"vec4 c = texture2D(texture, vUv);",
"gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];",
"gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];",
"gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];",
"gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];",
"}"
].join("\n");
_filter.colorMatrix.SHADER.WITHOUT_ALPHA = [
"precision highp float;",
"varying vec2 vUv;",
"uniform sampler2D texture;",
"uniform float m[20];",
"void main(void) {",
"vec4 c = texture2D(texture, vUv);",
"gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];",
"gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];",
"gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];",
"gl_FragColor.a = c.a;",
"}"
].join("\n");
_filter.brightness = function(brightness) {
const b = (brightness || 0) + 1;
_filter.colorMatrix([
b, 0, 0, 0, 0,
0, b, 0, 0, 0,
0, 0, b, 0, 0,
0, 0, 0, 1, 0
]);
};
_filter.saturation = function(amount) {
const x = (amount || 0) * 2 / 3 + 1;
const y = (x - 1) * -0.5;
_filter.colorMatrix([
x, y, y, 0, 0,
y, x, y, 0, 0,
y, y, x, 0, 0,
0, 0, 0, 1, 0
]);
};
_filter.desaturate = function() {
_filter.saturation(-1);
};
_filter.contrast = function(amount) {
const v = (amount || 0) + 1;
const o = -128 * (v - 1);
_filter.colorMatrix([
v, 0, 0, 0, o,
0, v, 0, 0, o,
0, 0, v, 0, o,
0, 0, 0, 1, 0
]);
};
_filter.negative = function() {
_filter.contrast(-2);
};
_filter.hue = function(rotation) {
rotation = (rotation || 0) / 180 * Math.PI;
const cos = Math.cos(rotation);
const sin = Math.sin(rotation);
const lumR = 0.213;
const lumG = 0.715;
const lumB = 0.072;
_filter.colorMatrix([
lumR + cos * (1 - lumR) + sin * -lumR, lumG + cos * -lumG + sin * -lumG, lumB + cos * -lumB + sin * (1 - lumB), 0, 0,
lumR + cos * -lumR + sin * 0.143, lumG + cos * (1 - lumG) + sin * 0.14, lumB + cos * -lumB + sin * -0.283, 0, 0,
lumR + cos * -lumR + sin * -(1 - lumR), lumG + cos * -lumG + sin * lumG, lumB + cos * (1 - lumB) + sin * lumB, 0, 0,
0, 0, 0, 1, 0
]);
};
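// note on the hue filter above: this is the standard luminance-preserving hue rotation;
// the matrix blends identity and rotation terms weighted by Rec. 709-style luma
// coefficients (0.213, 0.715, 0.072) so overall brightness stays roughly constant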
_filter.desaturateLuminance = function() {
_filter.colorMatrix([
0.2764723, 0.929708, 0.0938197, 0, -37.1,
0.2764723, 0.929708, 0.0938197, 0, -37.1,
0.2764723, 0.929708, 0.0938197, 0, -37.1,
0, 0, 0, 1, 0
]);
};
_filter.sepia = function() {
_filter.colorMatrix([
0.393, 0.7689999, 0.18899999, 0, 0,
0.349, 0.6859999, 0.16799999, 0, 0,
0.272, 0.5339999, 0.13099999, 0, 0,
0, 0, 0, 1, 0
]);
};
_filter.brownie = function() {
_filter.colorMatrix([
0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,
-0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,
0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,
0, 0, 0, 1, 0
]);
};
_filter.vintagePinhole = function() {
_filter.colorMatrix([
0.6279345635605994, 0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,
0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,
0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,
0, 0, 0, 1, 0
]);
};
_filter.kodachrome = function() {
_filter.colorMatrix([
1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,
-0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,
-0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,
0, 0, 0, 1, 0
]);
};
_filter.technicolor = function() {
_filter.colorMatrix([
1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,
-0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,
-0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,
0, 0, 0, 1, 0
]);
};
_filter.polaroid = function() {
_filter.colorMatrix([
1.438, -0.062, -0.062, 0, 0,
-0.122, 1.378, -0.122, 0, 0,
-0.016, -0.016, 1.483, 0, 0,
0, 0, 0, 1, 0
]);
};
_filter.shiftToBGR = function() {
_filter.colorMatrix([
0, 0, 1, 0, 0,
0, 1, 0, 0, 0,
1, 0, 0, 0, 0,
0, 0, 0, 1, 0
]);
};
_filter.convolution = function(matrix) {
const m = new Float32Array(matrix);
const pixelSizeX = 1 / _width;
const pixelSizeY = 1 / _height;
const program = _compileShader(_filter.convolution.SHADER);
gl.uniform1fv(program.uniform.m, m);
gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);
_draw();
};
_filter.convolution.SHADER = [
"precision highp float;",
"varying vec2 vUv;",
"uniform sampler2D texture;",
"uniform vec2 px;",
"uniform float m[9];",
"void main(void) {",
"vec4 c11 = texture2D(texture, vUv - px);",
"vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));",
"vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));",
"vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );",
"vec4 c22 = texture2D(texture, vUv);",
"vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );",
"vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );",
"vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );",
"vec4 c33 = texture2D(texture, vUv + px );",
"gl_FragColor = ",
"c11 * m[0] + c12 * m[1] + c22 * m[2] +",
"c21 * m[3] + c22 * m[4] + c23 * m[5] +",
"c31 * m[6] + c32 * m[7] + c33 * m[8];",
"gl_FragColor.a = c22.a;",
"}"
].join("\n");
_filter.detectEdges = function() {
_filter.convolution.call(this, [0, 1, 0, 1, -4, 1, 0, 1, 0]);
};
_filter.sobelX = function() {
_filter.convolution.call(this, [-1, 0, 1, -2, 0, 2, -1, 0, 1]);
};
_filter.sobelY = function() {
_filter.convolution.call(this, [-1, -2, -1, 0, 0, 0, 1, 2, 1]);
};
_filter.sharpen = function(amount) {
const a = amount || 1;
_filter.convolution.call(this, [0, -1 * a, 0, -1 * a, 1 + 4 * a, -1 * a, 0, -1 * a, 0]);
};
_filter.emboss = function(size) {
const s = size || 1;
_filter.convolution.call(this, [-2 * s, -1 * s, 0, -1 * s, 1, 1 * s, 0, 1 * s, 2 * s]);
};
_filter.blur = function(size) {
const blurSizeX = size / 7 / _width;
const blurSizeY = size / 7 / _height;
const program = _compileShader(_filter.blur.SHADER);
gl.uniform2f(program.uniform.px, 0, blurSizeY);
_draw(DRAW.INTERMEDIATE);
gl.uniform2f(program.uniform.px, blurSizeX, 0);
_draw();
};
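// note on the blur above: it is separable; the 15-tap Gaussian kernel in the shader
// below runs once vertically into an intermediate framebuffer (_draw(DRAW.INTERMEDIATE)),
// then once horizontally, two cheap 1-D passes instead of one expensive 2-D pass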
_filter.blur.SHADER = [
"precision highp float;",
"varying vec2 vUv;",
"uniform sampler2D texture;",
"uniform vec2 px;",
"void main(void) {",
"gl_FragColor = vec4(0.0);",
"gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;",
"gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;",
"gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;",
"gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;",
"gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;",
"gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;",
"gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;",
"gl_FragColor += texture2D(texture, vUv )*0.159576912161;",
"gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;",
"gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;",
"gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;",
"gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;",
"gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;",
"gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;",
"gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;",
"}"
].join("\n");
_filter.pixelate = function(size) {
const blurSizeX = size / _width;
const blurSizeY = size / _height;
const program = _compileShader(_filter.pixelate.SHADER);
gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);
_draw();
};
_filter.pixelate.SHADER = [
"precision highp float;",
"varying vec2 vUv;",
"uniform vec2 size;",
"uniform sampler2D texture;",
"vec2 pixelate(vec2 coord, vec2 size) {",
"return floor( coord / size ) * size;",
"}",
"void main(void) {",
"gl_FragColor = vec4(0.0);",
"vec2 coord = pixelate(vUv, size);",
"gl_FragColor += texture2D(texture, coord);",
"}"
].join("\n");
};
exports2.Canvas = WebGLImageFilter;
});
// config.js
var require_config = __commonJS((exports2) => {
__export(exports2, {
@@ -4975,6 +5685,23 @@ var require_config = __commonJS((exports2) => {
backend: "webgl",
console: true,
scoped: false,
filter: {
enabled: true,
return: true,
brightness: 0,
contrast: 0,
sharpness: 0,
blur: 0,
saturation: 0,
hue: 0,
negative: false,
sepia: false,
vintage: false,
kodachrome: false,
technicolor: false,
polaroid: false,
pixelate: 0
},
face: {
enabled: true,
detector: {
@@ -5118,10 +5845,13 @@ const ssrnet = require_ssrnet();
const emotion = require_emotion();
const posenet = require_posenet();
const handpose = require_handpose();
const fxImage = require_imagefx();
const defaults = require_config().default;
const app = require_package();
let config;
let fx;
let state = "idle";
let offscreenCanvas;
const models = {
facemesh: null,
posenet: null,
@@ -5198,31 +5928,87 @@ function sanity(input) {
async function load(userConfig) {
if (userConfig)
config = mergeDeep(defaults, userConfig);
-if (config.face.enabled && !models.facemesh)
if (config.face.enabled && !models.facemesh) {
log("Load model: Face");
models.facemesh = await facemesh.load(config.face);
}
-if (config.body.enabled && !models.posenet)
if (config.body.enabled && !models.posenet) {
log("Load model: Body");
models.posenet = await posenet.load(config.body);
}
-if (config.hand.enabled && !models.handpose)
if (config.hand.enabled && !models.handpose) {
log("Load model: Hand");
models.handpose = await handpose.load(config.hand);
}
-if (config.face.enabled && config.face.age.enabled && !models.age)
if (config.face.enabled && config.face.age.enabled && !models.age) {
log("Load model: Age");
models.age = await ssrnet.loadAge(config);
}
-if (config.face.enabled && config.face.gender.enabled && !models.gender)
if (config.face.enabled && config.face.gender.enabled && !models.gender) {
log("Load model: Gender");
models.gender = await ssrnet.loadGender(config);
}
-if (config.face.enabled && config.face.emotion.enabled && !models.emotion)
if (config.face.enabled && config.face.emotion.enabled && !models.emotion) {
log("Load model: Emotion");
models.emotion = await emotion.load(config);
}
}
function tfImage(input) {
-let image;
let filtered;
if (tf.ENV.flags.IS_BROWSER && config.filter.enabled && !(input instanceof tf.Tensor)) {
const width = input.naturalWidth || input.videoWidth || input.width || input.shape && input.shape[1] > 0;
const height = input.naturalHeight || input.videoHeight || input.height || input.shape && input.shape[2] > 0;
if (!offscreenCanvas) {
offscreenCanvas = document.createElement("canvas");
offscreenCanvas.width = width;
offscreenCanvas.height = height;
}
const ctx = offscreenCanvas.getContext("2d");
ctx.drawImage(input, 0, 0, width, height, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
if (!fx)
fx = new fxImage.Canvas();
else
fx.reset();
fx.addFilter("brightness", config.filter.brightness);
if (config.filter.contrast !== 0)
fx.addFilter("contrast", config.filter.contrast);
if (config.filter.sharpness !== 0)
fx.addFilter("sharpen", config.filter.sharpness);
if (config.filter.blur !== 0)
fx.addFilter("blur", config.filter.blur);
if (config.filter.saturation !== 0)
fx.addFilter("saturation", config.filter.saturation);
if (config.filter.hue !== 0)
fx.addFilter("hue", config.filter.hue);
if (config.filter.negative)
fx.addFilter("negative");
if (config.filter.sepia)
fx.addFilter("sepia");
if (config.filter.vintage)
fx.addFilter("brownie");
if (config.filter.kodachrome)
fx.addFilter("kodachrome");
if (config.filter.technicolor)
fx.addFilter("technicolor");
if (config.filter.polaroid)
fx.addFilter("polaroid");
if (config.filter.pixelate !== 0)
fx.addFilter("pixelate", config.filter.pixelate);
filtered = fx.apply(offscreenCanvas);
}
let tensor;
if (input instanceof tf.Tensor) {
-image = tf.clone(input);
tensor = tf.clone(input);
} else {
-const pixels = tf.browser.fromPixels(input);
const pixels = tf.browser.fromPixels(filtered || input);
const casted = pixels.toFloat();
-image = casted.expandDims(0);
tensor = casted.expandDims(0);
pixels.dispose();
casted.dispose();
}
-return image;
return {tensor, canvas: config.filter.return ? filtered : null};
}
async function detect(input, userConfig = {}) {
state = "config";
@@ -5263,7 +6049,10 @@ async function detect(input, userConfig = {}) {
if (config.scoped)
tf.engine().startScope();
analyze("Start Detect:");
-const imageTensor = tfImage(input);
timeStamp = now();
const image = tfImage(input);
perf.image = Math.trunc(now() - timeStamp);
const imageTensor = image.tensor;
state = "run:body";
timeStamp = now();
analyze("Start PoseNet");
@@ -5318,7 +6107,7 @@ async function detect(input, userConfig = {}) {
tf.engine().endScope();
analyze("End Scope:");
perf.total = Math.trunc(now() - timeStart);
-resolve({face: faceRes, body: poseRes, hand: handRes, performance: perf});
resolve({face: faceRes, body: poseRes, hand: handRes, performance: perf, canvas: image.canvas});
});
}
exports.detect = detect;
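As a reference point, the imagefx module bundled above exposes the filter chain through its exported Canvas class. A minimal sketch of the pattern tfImage() uses (the source canvas is illustrative):

```js
const fxImage = require('./src/imagefx.js');

const fx = new fxImage.Canvas();          // WebGL-backed filtering canvas
fx.addFilter('brightness', 0.1);          // filters accumulate into a chain...
fx.addFilter('sepia');
const filtered = fx.apply(sourceCanvas);  // ...then run in order on apply()
fx.reset();                               // clear the chain before the next frame
```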

26
dist/human.cjs.json vendored
View File

@@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
-"bytes": 4762,
"bytes": 5828,
"imports": []
},
"package.json": {
@@ -75,7 +75,7 @@
"imports": []
},
"src/handpose/handdetector.js": {
-"bytes": 4277,
"bytes": 4248,
"imports": [
{
"path": "src/handpose/box.js"
@@ -116,7 +116,7 @@
"imports": []
},
"src/human.js": {
-"bytes": 8784,
"bytes": 11161,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@@ -133,6 +133,9 @@
{
"path": "src/handpose/handpose.js"
},
{
"path": "src/imagefx.js"
},
{
"path": "config.js"
},
@@ -141,6 +144,10 @@
}
]
},
"src/imagefx.js": {
"bytes": 19452,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
@@ -253,7 +260,7 @@
"dist/human.cjs.map": {
"imports": [],
"inputs": {},
-"bytes": 215962
"bytes": 251682
},
"dist/human.cjs": {
"imports": [],
@@ -325,7 +332,7 @@
"bytesInOutput": 2813
},
"src/handpose/handdetector.js": {
-"bytesInOutput": 4161
"bytesInOutput": 4130
},
"src/handpose/keypoints.js": {
"bytesInOutput": 265
@@ -339,17 +346,20 @@
"src/handpose/handpose.js": {
"bytesInOutput": 2288
},
"src/imagefx.js": {
"bytesInOutput": 20197
},
"config.js": {
-"bytesInOutput": 1844
"bytesInOutput": 2173
},
"package.json": {
"bytesInOutput": 2778
},
"src/human.js": {
-"bytesInOutput": 7694
"bytesInOutput": 9977
}
},
-"bytes": 131607
"bytes": 154404
}
}
}

6
dist/human.cjs.map vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
-"bytes": 4762,
"bytes": 5828,
"imports": []
},
"package.json": {
@@ -75,7 +75,7 @@
"imports": []
},
"src/handpose/handdetector.js": {
-"bytes": 4277,
"bytes": 4248,
"imports": [
{
"path": "src/handpose/box.js"
@@ -116,7 +116,7 @@
"imports": []
},
"src/human.js": {
-"bytes": 8784,
"bytes": 11161,
"imports": [
{
"path": "src/facemesh/facemesh.js"
@@ -133,6 +133,9 @@
{
"path": "src/handpose/handpose.js"
},
{
"path": "src/imagefx.js"
},
{
"path": "config.js"
},
@@ -141,6 +144,10 @@
}
]
},
"src/imagefx.js": {
"bytes": 19452,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
@@ -253,7 +260,7 @@
"dist/human.esm-nobundle.js.map": {
"imports": [],
"inputs": {},
-"bytes": 194427
"bytes": 227481
},
"dist/human.esm-nobundle.js": {
"imports": [],
@@ -265,13 +272,13 @@
"bytesInOutput": 1950
},
"src/facemesh/box.js": {
-"bytesInOutput": 1026
"bytesInOutput": 1033
},
"src/facemesh/util.js": {
-"bytesInOutput": 1176
"bytesInOutput": 1183
},
"src/facemesh/pipeline.js": {
-"bytesInOutput": 5593
"bytesInOutput": 5576
},
"src/facemesh/uvcoords.js": {
"bytesInOutput": 16790
@@ -280,7 +287,7 @@
"bytesInOutput": 9995
},
"src/facemesh/facemesh.js": {
-"bytesInOutput": 1259
"bytesInOutput": 1264
},
"src/ssrnet/ssrnet.js": {
"bytesInOutput": 934
@@ -307,7 +314,7 @@
"bytesInOutput": 612
},
"src/posenet/decodePose.js": {
-"bytesInOutput": 1021
"bytesInOutput": 1028
},
"src/posenet/decodeMultiple.js": {
"bytesInOutput": 608
@@ -325,31 +332,34 @@
"bytesInOutput": 1400
},
"src/handpose/handdetector.js": {
-"bytesInOutput": 2067
"bytesInOutput": 2046
},
"src/handpose/keypoints.js": {
"bytesInOutput": 160
},
"src/handpose/util.js": {
-"bytesInOutput": 977
"bytesInOutput": 984
},
"src/handpose/pipeline.js": {
-"bytesInOutput": 3209
"bytesInOutput": 3216
},
"src/handpose/handpose.js": {
"bytesInOutput": 1211
},
"src/imagefx.js": {
"bytesInOutput": 11088
},
"config.js": {
-"bytesInOutput": 1130
"bytesInOutput": 1306
},
"package.json": {
"bytesInOutput": 2305
},
"src/human.js": {
-"bytesInOutput": 4349
"bytesInOutput": 5719
}
},
-"bytes": 68570
"bytes": 81206
}
}
}

645
dist/human.esm.js vendored

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

26
dist/human.esm.json vendored
View File

@@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
-"bytes": 4762,
"bytes": 5828,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -241,7 +241,7 @@
]
},
"src/handpose/handdetector.js": {
-"bytes": 4277,
"bytes": 4248,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -291,7 +291,7 @@
"imports": []
},
"src/human.js": {
-"bytes": 8784,
"bytes": 11161,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -311,6 +311,9 @@
{
"path": "src/handpose/handpose.js"
},
{
"path": "src/imagefx.js"
},
{
"path": "config.js"
},
@@ -319,6 +322,10 @@
}
]
},
"src/imagefx.js": {
"bytes": 19452,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
@@ -461,7 +468,7 @@
"dist/human.esm.js.map": {
"imports": [],
"inputs": {},
-"bytes": 4955479
"bytes": 4988533
},
"dist/human.esm.js": {
"imports": [],
@@ -590,7 +597,7 @@
"bytesInOutput": 1386
},
"src/handpose/handdetector.js": {
-"bytesInOutput": 2073
"bytesInOutput": 2052
},
"src/handpose/keypoints.js": {
"bytesInOutput": 161
@@ -604,17 +611,20 @@
"src/handpose/handpose.js": {
"bytesInOutput": 1189
},
"src/imagefx.js": {
"bytesInOutput": 11089
},
"config.js": {
-"bytesInOutput": 1131
"bytesInOutput": 1307
},
"package.json": {
"bytesInOutput": 2306
},
"src/human.js": {
-"bytesInOutput": 4464
"bytesInOutput": 5876
}
},
-"bytes": 1105498
"bytes": 1118154
}
}
}

645
dist/human.js vendored

File diff suppressed because one or more lines are too long

6
dist/human.js.map vendored

File diff suppressed because one or more lines are too long

26
dist/human.json vendored
View File

@@ -1,7 +1,7 @@
{
"inputs": {
"config.js": {
-"bytes": 4762,
"bytes": 5828,
"imports": []
},
"node_modules/@tensorflow/tfjs-backend-cpu/dist/tf-backend-cpu.node.js": {
@@ -241,7 +241,7 @@
]
},
"src/handpose/handdetector.js": {
-"bytes": 4277,
"bytes": 4248,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -291,7 +291,7 @@
"imports": []
},
"src/human.js": {
-"bytes": 8784,
"bytes": 11161,
"imports": [
{
"path": "node_modules/@tensorflow/tfjs/dist/tf.node.js"
@@ -311,6 +311,9 @@
{
"path": "src/handpose/handpose.js"
},
{
"path": "src/imagefx.js"
},
{
"path": "config.js"
},
@@ -319,6 +322,10 @@
}
]
},
"src/imagefx.js": {
"bytes": 19452,
"imports": []
},
"src/posenet/buildParts.js": {
"bytes": 2035,
"imports": [
@@ -461,7 +468,7 @@
"dist/human.js.map": {
"imports": [],
"inputs": {},
-"bytes": 4955479
"bytes": 4988533
},
"dist/human.js": {
"imports": [],
@@ -590,7 +597,7 @@
"bytesInOutput": 1386
},
"src/handpose/handdetector.js": {
-"bytesInOutput": 2073
"bytesInOutput": 2052
},
"src/handpose/keypoints.js": {
"bytesInOutput": 161
@@ -604,17 +611,20 @@
"src/handpose/handpose.js": {
"bytesInOutput": 1189
},
"src/imagefx.js": {
"bytesInOutput": 11089
},
"config.js": {
-"bytesInOutput": 1131
"bytesInOutput": 1307
},
"package.json": {
"bytesInOutput": 2306
},
"src/human.js": {
-"bytesInOutput": 4464
"bytesInOutput": 5876
}
},
-"bytes": 1105507
"bytes": 1118163
}
}
}

View File

@@ -37,7 +37,7 @@
"rimraf": "^3.0.2"
},
"scripts": {
-"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation demo/node.js",
"start": "node --trace-warnings --unhandled-rejections=strict --trace-uncaught --no-deprecation src/node.js",
"lint": "eslint src/*.js demo/*.js",
"build-iife": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=iife --minify --external:fs --global-name=human --metafile=dist/human.json --outfile=dist/human.js src/human.js",
"build-esm-bundle": "esbuild --bundle --platform=browser --sourcemap --target=esnext --format=esm --minify --external:fs --metafile=dist/human.esm.json --outfile=dist/human.esm.js src/human.js",

View File

@@ -34,7 +34,6 @@ class HandDetector {
async getBoundingBoxes(input) {
const batchedPrediction = this.model.predict(input);
const prediction = batchedPrediction.squeeze();
-console.log(prediction);
// Regression score for each anchor point.
const scores = tf.tidy(() => tf.sigmoid(tf.slice(prediction, [0, 0], [-1, 1])).squeeze());
// Bounding box for each anchor point.

View File

@@ -4,11 +4,14 @@ const ssrnet = require('./ssrnet/ssrnet.js');
const emotion = require('./emotion/emotion.js');
const posenet = require('./posenet/posenet.js');
const handpose = require('./handpose/handpose.js');
const fxImage = require('./imagefx.js');
const defaults = require('../config.js').default;
const app = require('../package.json');
let config;
let fx;
let state = 'idle';
let offscreenCanvas;
// object that contains all initialized models
const models = {
@@ -93,26 +96,75 @@ function sanity(input) {
async function load(userConfig) {
if (userConfig) config = mergeDeep(defaults, userConfig);
-if (config.face.enabled && !models.facemesh) models.facemesh = await facemesh.load(config.face);
-if (config.body.enabled && !models.posenet) models.posenet = await posenet.load(config.body);
-if (config.hand.enabled && !models.handpose) models.handpose = await handpose.load(config.hand);
-if (config.face.enabled && config.face.age.enabled && !models.age) models.age = await ssrnet.loadAge(config);
-if (config.face.enabled && config.face.gender.enabled && !models.gender) models.gender = await ssrnet.loadGender(config);
-if (config.face.enabled && config.face.emotion.enabled && !models.emotion) models.emotion = await emotion.load(config);
if (config.face.enabled && !models.facemesh) {
log('Load model: Face');
models.facemesh = await facemesh.load(config.face);
}
if (config.body.enabled && !models.posenet) {
log('Load model: Body');
models.posenet = await posenet.load(config.body);
}
if (config.hand.enabled && !models.handpose) {
log('Load model: Hand');
models.handpose = await handpose.load(config.hand);
}
if (config.face.enabled && config.face.age.enabled && !models.age) {
log('Load model: Age');
models.age = await ssrnet.loadAge(config);
}
if (config.face.enabled && config.face.gender.enabled && !models.gender) {
log('Load model: Gender');
models.gender = await ssrnet.loadGender(config);
}
if (config.face.enabled && config.face.emotion.enabled && !models.emotion) {
log('Load model: Emotion');
models.emotion = await emotion.load(config);
}
}
 function tfImage(input) {
-  let image;
+  let filtered;
+  if (tf.ENV.flags.IS_BROWSER && config.filter.enabled && !(input instanceof tf.Tensor)) {
+    const width = input.naturalWidth || input.videoWidth || input.width || (input.shape ? input.shape[2] : 0);
+    const height = input.naturalHeight || input.videoHeight || input.height || (input.shape ? input.shape[1] : 0);
+    // if (!offscreenCanvas) offscreenCanvas = new OffscreenCanvas(width, height);
+    if (!offscreenCanvas) {
+      offscreenCanvas = document.createElement('canvas');
+      offscreenCanvas.width = width;
+      offscreenCanvas.height = height;
+    }
+    const ctx = offscreenCanvas.getContext('2d');
+    ctx.drawImage(input, 0, 0, width, height, 0, 0, offscreenCanvas.width, offscreenCanvas.height);
+    if (!fx) fx = new fxImage.Canvas();
+    else fx.reset();
+    fx.addFilter('brightness', config.filter.brightness); // must have at least one filter enabled
+    if (config.filter.contrast !== 0) fx.addFilter('contrast', config.filter.contrast);
+    if (config.filter.sharpness !== 0) fx.addFilter('sharpen', config.filter.sharpness);
+    if (config.filter.blur !== 0) fx.addFilter('blur', config.filter.blur);
+    if (config.filter.saturation !== 0) fx.addFilter('saturation', config.filter.saturation);
+    if (config.filter.hue !== 0) fx.addFilter('hue', config.filter.hue);
+    if (config.filter.negative) fx.addFilter('negative');
+    if (config.filter.sepia) fx.addFilter('sepia');
+    if (config.filter.vintage) fx.addFilter('brownie');
+    if (config.filter.kodachrome) fx.addFilter('kodachrome');
+    if (config.filter.technicolor) fx.addFilter('technicolor');
+    if (config.filter.polaroid) fx.addFilter('polaroid');
+    if (config.filter.pixelate !== 0) fx.addFilter('pixelate', config.filter.pixelate);
+    filtered = fx.apply(offscreenCanvas);
+  }
+  let tensor;
   if (input instanceof tf.Tensor) {
-    image = tf.clone(input);
+    tensor = tf.clone(input);
   } else {
-    const pixels = tf.browser.fromPixels(input);
+    const pixels = tf.browser.fromPixels(filtered || input);
     const casted = pixels.toFloat();
-    image = casted.expandDims(0);
+    tensor = casted.expandDims(0);
     pixels.dispose();
     casted.dispose();
   }
-  return image;
+  return { tensor, canvas: config.filter.return ? filtered : null };
 }
 async function detect(input, userConfig = {}) {
@@ -167,7 +219,10 @@ async function detect(input, userConfig = {}) {
   analyze('Start Detect:');
-  const imageTensor = tfImage(input);
+  timeStamp = now();
+  const image = tfImage(input);
+  perf.image = Math.trunc(now() - timeStamp);
+  const imageTensor = image.tensor;
   // run posenet
   state = 'run:body';
@@ -239,7 +294,7 @@ async function detect(input, userConfig = {}) {
   analyze('End Scope:');
   perf.total = Math.trunc(now() - timeStart);
-  resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf });
+  resolve({ face: faceRes, body: poseRes, hand: handRes, performance: perf, canvas: image.canvas });
   });
 }
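Taken together, the new code paths are driven entirely by config.filter and hand the processed frame back to the caller. A minimal usage sketch, assuming a video element and a preview canvas already on the page (both names are placeholders):

// enable a couple of filters, run detection, then show the processed frame
const result = await human.detect(video, {
  filter: { enabled: true, return: true, brightness: 0.2, sepia: true },
});
console.log('image pre-processing took', result.performance.image, 'ms');
if (result.canvas) document.getElementById('preview').getContext('2d').drawImage(result.canvas, 0, 0);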

src/imagefx.js (new file, 608 lines)

@ -0,0 +1,608 @@
/* eslint-disable no-shadow */
/* eslint-disable prefer-rest-params */
/* eslint-disable no-sequences */
/* eslint-disable no-unused-vars */
/* eslint-disable no-unused-expressions */
/* eslint-disable no-multi-assign */
/* eslint-disable no-use-before-define */
/*
WebGLImageFilter - MIT Licensed
2013, Dominic Szablewski - phoboslab.org
*/
const WebGLProgram = function (gl, vertexSource, fragmentSource) {
const _collect = function (source, prefix, collection) {
const r = new RegExp('\\b' + prefix + ' \\w+ (\\w+)', 'ig');
source.replace(r, (match, name) => {
collection[name] = 0;
return match;
});
};
const _compile = function (gl, source, type) {
const shader = gl.createShader(type);
gl.shaderSource(shader, source);
gl.compileShader(shader);
if (!gl.getShaderParameter(shader, gl.COMPILE_STATUS)) {
throw new Error(`Filter: GL compile failed: ${gl.getShaderInfoLog(shader)}`);
}
return shader;
};
this.uniform = {};
this.attribute = {};
const _vsh = _compile(gl, vertexSource, gl.VERTEX_SHADER);
const _fsh = _compile(gl, fragmentSource, gl.FRAGMENT_SHADER);
this.id = gl.createProgram();
gl.attachShader(this.id, _vsh);
gl.attachShader(this.id, _fsh);
gl.linkProgram(this.id);
if (!gl.getProgramParameter(this.id, gl.LINK_STATUS)) {
throw new Error(`Filter: GL link failed: ${gl.getProgramInfoLog(this.id)}`);
}
gl.useProgram(this.id);
// Collect attributes
_collect(vertexSource, 'attribute', this.attribute);
for (const a in this.attribute) {
this.attribute[a] = gl.getAttribLocation(this.id, a);
}
// Collect uniforms
_collect(vertexSource, 'uniform', this.uniform);
_collect(fragmentSource, 'uniform', this.uniform);
for (const u in this.uniform) {
this.uniform[u] = gl.getUniformLocation(this.id, u);
}
};
const WebGLImageFilter = function (params) {
if (!params) params = { };
let _drawCount = 0;
let _sourceTexture = null;
let _lastInChain = false;
let _currentFramebufferIndex = -1;
let _tempFramebuffers = [null, null];
let _filterChain = [];
let _width = -1;
let _height = -1;
let _vertexBuffer = null;
let _currentProgram = null;
const _canvas = params.canvas || document.createElement('canvas');
// key is the shader program source, value is the compiled program
const _shaderProgramCache = { };
const gl = _canvas.getContext('webgl') || _canvas.getContext('experimental-webgl');
if (!gl) throw new Error('Filter: getContext() failed');
this.addFilter = function (name) {
const args = Array.prototype.slice.call(arguments, 1);
const filter = _filter[name];
_filterChain.push({ func: filter, args });
};
this.reset = function () {
_filterChain = [];
};
this.apply = function (image) {
_resize(image.width, image.height);
_drawCount = 0;
// Create the texture for the input image if we haven't yet
if (!_sourceTexture) _sourceTexture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, _sourceTexture);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.NEAREST);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.NEAREST);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, gl.RGBA, gl.UNSIGNED_BYTE, image);
// No filters? Just draw
if (_filterChain.length === 0) {
const program = _compileShader(SHADER.FRAGMENT_IDENTITY);
_draw();
return _canvas;
}
for (let i = 0; i < _filterChain.length; i++) {
_lastInChain = (i === _filterChain.length - 1);
const f = _filterChain[i];
f.func.apply(this, f.args || []);
}
return _canvas;
};
const _resize = function (width, height) {
// Same width/height? Nothing to do here
if (width === _width && height === _height) { return; }
_canvas.width = _width = width;
_canvas.height = _height = height;
// Create the context if we don't have it yet
if (!_vertexBuffer) {
// Create the vertex buffer for the two triangles [x, y, u, v] * 6
const vertices = new Float32Array([
-1, -1, 0, 1, 1, -1, 1, 1, -1, 1, 0, 0,
-1, 1, 0, 0, 1, -1, 1, 1, 1, 1, 1, 0,
]);
_vertexBuffer = gl.createBuffer();
gl.bindBuffer(gl.ARRAY_BUFFER, _vertexBuffer);
gl.bufferData(gl.ARRAY_BUFFER, vertices, gl.STATIC_DRAW);
// Not sure if this is a good idea; at least it makes texture loading
// in Ejecta instant.
gl.pixelStorei(gl.UNPACK_PREMULTIPLY_ALPHA_WEBGL, true);
}
gl.viewport(0, 0, _width, _height);
// Delete old temp framebuffers
_tempFramebuffers = [null, null];
};
const _getTempFramebuffer = function (index) {
_tempFramebuffers[index] = _tempFramebuffers[index]
|| _createFramebufferTexture(_width, _height);
return _tempFramebuffers[index];
};
const _createFramebufferTexture = function (width, height) {
const fbo = gl.createFramebuffer();
gl.bindFramebuffer(gl.FRAMEBUFFER, fbo);
const renderbuffer = gl.createRenderbuffer();
gl.bindRenderbuffer(gl.RENDERBUFFER, renderbuffer);
const texture = gl.createTexture();
gl.bindTexture(gl.TEXTURE_2D, texture);
gl.texImage2D(gl.TEXTURE_2D, 0, gl.RGBA, width, height, 0, gl.RGBA, gl.UNSIGNED_BYTE, null);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MAG_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_MIN_FILTER, gl.LINEAR);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_S, gl.CLAMP_TO_EDGE);
gl.texParameteri(gl.TEXTURE_2D, gl.TEXTURE_WRAP_T, gl.CLAMP_TO_EDGE);
gl.framebufferTexture2D(gl.FRAMEBUFFER, gl.COLOR_ATTACHMENT0, gl.TEXTURE_2D, texture, 0);
gl.bindTexture(gl.TEXTURE_2D, null);
gl.bindFramebuffer(gl.FRAMEBUFFER, null);
return { fbo, texture };
};
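// Draw calls ping-pong between the two temp framebuffers above;
// only the last filter in the chain renders directly to the visible canvas.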
const _draw = function (flags) {
let source = null;
let target = null;
let flipY = false;
// Set up the source
if (_drawCount === 0) {
// First draw call - use the source texture
source = _sourceTexture;
} else {
// All following draw calls use the temp buffer last drawn to
source = _getTempFramebuffer(_currentFramebufferIndex).texture;
}
_drawCount++;
// Set up the target
if (_lastInChain && !(flags & DRAW.INTERMEDIATE)) {
// Last filter in our chain - draw directly to the WebGL Canvas. We may
// also have to flip the image vertically now
target = null;
flipY = _drawCount % 2 === 0;
} else {
// Intermediate draw call - get a temp buffer to draw to
_currentFramebufferIndex = (_currentFramebufferIndex + 1) % 2;
target = _getTempFramebuffer(_currentFramebufferIndex).fbo;
}
// Bind the source and target and draw the two triangles
gl.bindTexture(gl.TEXTURE_2D, source);
gl.bindFramebuffer(gl.FRAMEBUFFER, target);
gl.uniform1f(_currentProgram.uniform.flipY, (flipY ? -1 : 1));
gl.drawArrays(gl.TRIANGLES, 0, 6);
};
const _compileShader = function (fragmentSource) {
if (_shaderProgramCache[fragmentSource]) {
_currentProgram = _shaderProgramCache[fragmentSource];
gl.useProgram(_currentProgram.id);
return _currentProgram;
}
// Compile shaders
_currentProgram = new WebGLProgram(gl, SHADER.VERTEX_IDENTITY, fragmentSource);
const floatSize = Float32Array.BYTES_PER_ELEMENT;
const vertSize = 4 * floatSize;
gl.enableVertexAttribArray(_currentProgram.attribute.pos);
gl.vertexAttribPointer(_currentProgram.attribute.pos, 2, gl.FLOAT, false, vertSize, 0 * floatSize);
gl.enableVertexAttribArray(_currentProgram.attribute.uv);
gl.vertexAttribPointer(_currentProgram.attribute.uv, 2, gl.FLOAT, false, vertSize, 2 * floatSize);
_shaderProgramCache[fragmentSource] = _currentProgram;
return _currentProgram;
};
let DRAW = { INTERMEDIATE: 1 };
let SHADER = {};
SHADER.VERTEX_IDENTITY = [
'precision highp float;',
'attribute vec2 pos;',
'attribute vec2 uv;',
'varying vec2 vUv;',
'uniform float flipY;',
'void main(void) {',
'vUv = uv;',
'gl_Position = vec4(pos.x, pos.y*flipY, 0.0, 1.);',
'}',
].join('\n');
SHADER.FRAGMENT_IDENTITY = [
'precision highp float;',
'varying vec2 vUv;',
'uniform sampler2D texture;',
'void main(void) {',
'gl_FragColor = texture2D(texture, vUv);',
'}',
].join('\n');
let _filter = {};
// -------------------------------------------------------------------------
// Color Matrix Filter
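// Each color-matrix filter below is a 4x5 matrix: every output channel is a
// weighted sum of the input R, G, B, A plus a constant offset (specified in
// the 0-255 range and normalized to 0-1 before upload).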
_filter.colorMatrix = function (matrix) {
// Create a Float32 Array and normalize the offset component to 0-1
const m = new Float32Array(matrix);
m[4] /= 255;
m[9] /= 255;
m[14] /= 255;
m[19] /= 255;
// Can we ignore the alpha value? Makes things a bit faster.
const shader = (m[18] === 1 && m[3] === 0 && m[8] === 0 && m[13] === 0 && m[15] === 0 && m[16] === 0 && m[17] === 0 && m[19] === 0)
? _filter.colorMatrix.SHADER.WITHOUT_ALPHA
: _filter.colorMatrix.SHADER.WITH_ALPHA;
const program = _compileShader(shader);
gl.uniform1fv(program.uniform.m, m);
_draw();
};
_filter.colorMatrix.SHADER = {};
_filter.colorMatrix.SHADER.WITH_ALPHA = [
'precision highp float;',
'varying vec2 vUv;',
'uniform sampler2D texture;',
'uniform float m[20];',
'void main(void) {',
'vec4 c = texture2D(texture, vUv);',
'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[3] * c.a + m[4];',
'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[8] * c.a + m[9];',
'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[13] * c.a + m[14];',
'gl_FragColor.a = m[15] * c.r + m[16] * c.g + m[17] * c.b + m[18] * c.a + m[19];',
'}',
].join('\n');
_filter.colorMatrix.SHADER.WITHOUT_ALPHA = [
'precision highp float;',
'varying vec2 vUv;',
'uniform sampler2D texture;',
'uniform float m[20];',
'void main(void) {',
'vec4 c = texture2D(texture, vUv);',
'gl_FragColor.r = m[0] * c.r + m[1] * c.g + m[2] * c.b + m[4];',
'gl_FragColor.g = m[5] * c.r + m[6] * c.g + m[7] * c.b + m[9];',
'gl_FragColor.b = m[10] * c.r + m[11] * c.g + m[12] * c.b + m[14];',
'gl_FragColor.a = c.a;',
'}',
].join('\n');
_filter.brightness = function (brightness) {
const b = (brightness || 0) + 1;
_filter.colorMatrix([
b, 0, 0, 0, 0,
0, b, 0, 0, 0,
0, 0, b, 0, 0,
0, 0, 0, 1, 0,
]);
};
_filter.saturation = function (amount) {
const x = (amount || 0) * 2 / 3 + 1;
const y = ((x - 1) * -0.5);
_filter.colorMatrix([
x, y, y, 0, 0,
y, x, y, 0, 0,
y, y, x, 0, 0,
0, 0, 0, 1, 0,
]);
};
_filter.desaturate = function () {
_filter.saturation(-1);
};
_filter.contrast = function (amount) {
const v = (amount || 0) + 1;
const o = -128 * (v - 1);
_filter.colorMatrix([
v, 0, 0, 0, o,
0, v, 0, 0, o,
0, 0, v, 0, o,
0, 0, 0, 1, 0,
]);
};
_filter.negative = function () {
_filter.contrast(-2);
};
_filter.hue = function (rotation) {
rotation = (rotation || 0) / 180 * Math.PI;
const cos = Math.cos(rotation);
const sin = Math.sin(rotation);
const lumR = 0.213;
const lumG = 0.715;
const lumB = 0.072;
_filter.colorMatrix([
lumR + cos * (1 - lumR) + sin * (-lumR), lumG + cos * (-lumG) + sin * (-lumG), lumB + cos * (-lumB) + sin * (1 - lumB), 0, 0,
lumR + cos * (-lumR) + sin * (0.143), lumG + cos * (1 - lumG) + sin * (0.140), lumB + cos * (-lumB) + sin * (-0.283), 0, 0,
lumR + cos * (-lumR) + sin * (-(1 - lumR)), lumG + cos * (-lumG) + sin * (lumG), lumB + cos * (1 - lumB) + sin * (lumB), 0, 0,
0, 0, 0, 1, 0,
]);
};
_filter.desaturateLuminance = function () {
_filter.colorMatrix([
0.2764723, 0.9297080, 0.0938197, 0, -37.1,
0.2764723, 0.9297080, 0.0938197, 0, -37.1,
0.2764723, 0.9297080, 0.0938197, 0, -37.1,
0, 0, 0, 1, 0,
]);
};
_filter.sepia = function () {
_filter.colorMatrix([
0.393, 0.7689999, 0.18899999, 0, 0,
0.349, 0.6859999, 0.16799999, 0, 0,
0.272, 0.5339999, 0.13099999, 0, 0,
0, 0, 0, 1, 0,
]);
};
_filter.brownie = function () {
_filter.colorMatrix([
0.5997023498159715, 0.34553243048391263, -0.2708298674538042, 0, 47.43192855600873,
-0.037703249837783157, 0.8609577587992641, 0.15059552388459913, 0, -36.96841498319127,
0.24113635128153335, -0.07441037908422492, 0.44972182064877153, 0, -7.562075277591283,
0, 0, 0, 1, 0,
]);
};
_filter.vintagePinhole = function () {
_filter.colorMatrix([
0.6279345635605994, 0.3202183420819367, -0.03965408211312453, 0, 9.651285835294123,
0.02578397704808868, 0.6441188644374771, 0.03259127616149294, 0, 7.462829176470591,
0.0466055556782719, -0.0851232987247891, 0.5241648018700465, 0, 5.159190588235296,
0, 0, 0, 1, 0,
]);
};
_filter.kodachrome = function () {
_filter.colorMatrix([
1.1285582396593525, -0.3967382283601348, -0.03992559172921793, 0, 63.72958762196502,
-0.16404339962244616, 1.0835251566291304, -0.05498805115633132, 0, 24.732407896706203,
-0.16786010706155763, -0.5603416277695248, 1.6014850761964943, 0, 35.62982807460946,
0, 0, 0, 1, 0,
]);
};
_filter.technicolor = function () {
_filter.colorMatrix([
1.9125277891456083, -0.8545344976951645, -0.09155508482755585, 0, 11.793603434377337,
-0.3087833385928097, 1.7658908555458428, -0.10601743074722245, 0, -70.35205161461398,
-0.231103377548616, -0.7501899197440212, 1.847597816108189, 0, 30.950940869491138,
0, 0, 0, 1, 0,
]);
};
_filter.polaroid = function () {
_filter.colorMatrix([
1.438, -0.062, -0.062, 0, 0,
-0.122, 1.378, -0.122, 0, 0,
-0.016, -0.016, 1.483, 0, 0,
0, 0, 0, 1, 0,
]);
};
_filter.shiftToBGR = function () {
_filter.colorMatrix([
0, 0, 1, 0, 0,
0, 1, 0, 0, 0,
1, 0, 0, 0, 0,
0, 0, 0, 1, 0,
]);
};
// -------------------------------------------------------------------------
// Convolution Filter
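// 3x3 convolution sampled at the eight neighbors plus the center texel;
// m[0..8] are the kernel weights in row-major order (c11..c33).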
_filter.convolution = function (matrix) {
const m = new Float32Array(matrix);
const pixelSizeX = 1 / _width;
const pixelSizeY = 1 / _height;
const program = _compileShader(_filter.convolution.SHADER);
gl.uniform1fv(program.uniform.m, m);
gl.uniform2f(program.uniform.px, pixelSizeX, pixelSizeY);
_draw();
};
_filter.convolution.SHADER = [
'precision highp float;',
'varying vec2 vUv;',
'uniform sampler2D texture;',
'uniform vec2 px;',
'uniform float m[9];',
'void main(void) {',
'vec4 c11 = texture2D(texture, vUv - px);', // top left
'vec4 c12 = texture2D(texture, vec2(vUv.x, vUv.y - px.y));', // top center
'vec4 c13 = texture2D(texture, vec2(vUv.x + px.x, vUv.y - px.y));', // top right
'vec4 c21 = texture2D(texture, vec2(vUv.x - px.x, vUv.y) );', // mid left
'vec4 c22 = texture2D(texture, vUv);', // mid center
'vec4 c23 = texture2D(texture, vec2(vUv.x + px.x, vUv.y) );', // mid right
'vec4 c31 = texture2D(texture, vec2(vUv.x - px.x, vUv.y + px.y) );', // bottom left
'vec4 c32 = texture2D(texture, vec2(vUv.x, vUv.y + px.y) );', // bottom center
'vec4 c33 = texture2D(texture, vUv + px );', // bottom right
'gl_FragColor = ',
'c11 * m[0] + c12 * m[1] + c13 * m[2] +',
'c21 * m[3] + c22 * m[4] + c23 * m[5] +',
'c31 * m[6] + c32 * m[7] + c33 * m[8];',
'gl_FragColor.a = c22.a;',
'}',
].join('\n');
_filter.detectEdges = function () {
_filter.convolution.call(this, [
0, 1, 0,
1, -4, 1,
0, 1, 0,
]);
};
_filter.sobelX = function () {
_filter.convolution.call(this, [
-1, 0, 1,
-2, 0, 2,
-1, 0, 1,
]);
};
_filter.sobelY = function () {
_filter.convolution.call(this, [
-1, -2, -1,
0, 0, 0,
1, 2, 1,
]);
};
_filter.sharpen = function (amount) {
const a = amount || 1;
_filter.convolution.call(this, [
0, -1 * a, 0,
-1 * a, 1 + 4 * a, -1 * a,
0, -1 * a, 0,
]);
};
_filter.emboss = function (size) {
const s = size || 1;
_filter.convolution.call(this, [
-2 * s, -1 * s, 0,
-1 * s, 1, 1 * s,
0, 1 * s, 2 * s,
]);
};
// -------------------------------------------------------------------------
// Blur Filter
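// Separable Gaussian blur: a vertical pass into a temp framebuffer, then a
// horizontal pass; each pass samples 15 texels, hence the size / 7 per-texel step.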
_filter.blur = function (size) {
const blurSizeX = (size / 7) / _width;
const blurSizeY = (size / 7) / _height;
const program = _compileShader(_filter.blur.SHADER);
// Vertical
gl.uniform2f(program.uniform.px, 0, blurSizeY);
_draw(DRAW.INTERMEDIATE);
// Horizontal
gl.uniform2f(program.uniform.px, blurSizeX, 0);
_draw();
};
_filter.blur.SHADER = [
'precision highp float;',
'varying vec2 vUv;',
'uniform sampler2D texture;',
'uniform vec2 px;',
'void main(void) {',
'gl_FragColor = vec4(0.0);',
'gl_FragColor += texture2D(texture, vUv + vec2(-7.0*px.x, -7.0*px.y))*0.0044299121055113265;',
'gl_FragColor += texture2D(texture, vUv + vec2(-6.0*px.x, -6.0*px.y))*0.00895781211794;',
'gl_FragColor += texture2D(texture, vUv + vec2(-5.0*px.x, -5.0*px.y))*0.0215963866053;',
'gl_FragColor += texture2D(texture, vUv + vec2(-4.0*px.x, -4.0*px.y))*0.0443683338718;',
'gl_FragColor += texture2D(texture, vUv + vec2(-3.0*px.x, -3.0*px.y))*0.0776744219933;',
'gl_FragColor += texture2D(texture, vUv + vec2(-2.0*px.x, -2.0*px.y))*0.115876621105;',
'gl_FragColor += texture2D(texture, vUv + vec2(-1.0*px.x, -1.0*px.y))*0.147308056121;',
'gl_FragColor += texture2D(texture, vUv )*0.159576912161;',
'gl_FragColor += texture2D(texture, vUv + vec2( 1.0*px.x, 1.0*px.y))*0.147308056121;',
'gl_FragColor += texture2D(texture, vUv + vec2( 2.0*px.x, 2.0*px.y))*0.115876621105;',
'gl_FragColor += texture2D(texture, vUv + vec2( 3.0*px.x, 3.0*px.y))*0.0776744219933;',
'gl_FragColor += texture2D(texture, vUv + vec2( 4.0*px.x, 4.0*px.y))*0.0443683338718;',
'gl_FragColor += texture2D(texture, vUv + vec2( 5.0*px.x, 5.0*px.y))*0.0215963866053;',
'gl_FragColor += texture2D(texture, vUv + vec2( 6.0*px.x, 6.0*px.y))*0.00895781211794;',
'gl_FragColor += texture2D(texture, vUv + vec2( 7.0*px.x, 7.0*px.y))*0.0044299121055113265;',
'}',
].join('\n');
// -------------------------------------------------------------------------
// Pixelate Filter
_filter.pixelate = function (size) {
const blurSizeX = (size) / _width;
const blurSizeY = (size) / _height;
const program = _compileShader(_filter.pixelate.SHADER);
// Horizontal
gl.uniform2f(program.uniform.size, blurSizeX, blurSizeY);
_draw();
};
_filter.pixelate.SHADER = [
'precision highp float;',
'varying vec2 vUv;',
'uniform vec2 size;',
'uniform sampler2D texture;',
'vec2 pixelate(vec2 coord, vec2 size) {',
'return floor( coord / size ) * size;',
'}',
'void main(void) {',
'gl_FragColor = vec4(0.0);',
'vec2 coord = pixelate(vUv, size);',
'gl_FragColor += texture2D(texture, coord);',
'}',
].join('\n');
};
exports.Canvas = WebGLImageFilter;
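The filter chain can also be exercised standalone; a minimal sketch, assuming an already-loaded image element named img:

const fxImage = require('./imagefx.js');
const fx = new fxImage.Canvas(); // creates its own WebGL canvas internally
fx.addFilter('hue', 90); // filters run in the order they are added
fx.addFilter('sharpen', 0.5);
const out = fx.apply(img); // returns the canvas with the full chain applied
document.body.appendChild(out);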

src/node.js (new file, 32 lines)

@ -0,0 +1,32 @@
const console = require('console');
const tf = require('@tensorflow/tfjs-node');
const human = require('..'); // this resolves to project root which is '@vladmandic/human'
const logger = new console.Console({
stdout: process.stdout,
stderr: process.stderr,
ignoreErrors: true,
groupIndentation: 2,
inspectOptions: {
showHidden: true,
depth: 5,
colors: true,
showProxy: true,
maxArrayLength: 1024,
maxStringLength: 10240,
breakLength: 300,
compact: 64,
sorted: false,
getters: true,
},
});
async function main() {
await tf.ready();
logger.info('Human:', human.version);
logger.info('Default Configuration', human.defaults);
logger.info('TFJS Version:', tf.version_core, 'Backend:', tf.getBackend());
logger.info('TFJS Flags:', tf.env().features);
}
main();
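Assuming @tensorflow/tfjs-node is installed, running node src/node.js should print the library version, its default configuration, and the active TFJS backend and flags, then exit.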